diff --git a/.ci/gcb-community-checker.yml b/.ci/gcb-community-checker.yml index 37b2e4955ca1..0eac9a4666e7 100644 --- a/.ci/gcb-community-checker.yml +++ b/.ci/gcb-community-checker.yml @@ -61,7 +61,7 @@ steps: - name: 'gcr.io/graphite-docker-images/go-plus' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: community-checker - secretEnv: ["GITHUB_TOKEN", "GENERATE_DIFFS_TRIGGER"] + secretEnv: ["GITHUB_TOKEN_MAGIC_MODULES", "GENERATE_DIFFS_TRIGGER"] timeout: 8000s args: - "community-checker" @@ -74,7 +74,7 @@ steps: availableSecrets: secretManager: - - versionName: projects/673497134629/secrets/github-magician-token/versions/latest - env: GITHUB_TOKEN + - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-magic-modules/versions/latest + env: GITHUB_TOKEN_MAGIC_MODULES - versionName: projects/673497134629/secrets/ci-trigger-generate-diffs/versions/latest env: GENERATE_DIFFS_TRIGGER diff --git a/.ci/gcb-contributor-membership-checker.yml b/.ci/gcb-contributor-membership-checker.yml index c7b2cc3cc89a..f40bc6d69c5b 100644 --- a/.ci/gcb-contributor-membership-checker.yml +++ b/.ci/gcb-contributor-membership-checker.yml @@ -62,7 +62,7 @@ steps: entrypoint: "/workspace/.ci/scripts/go-plus/magician/exec.sh" id: contributor-membership-checker secretEnv: - ["GITHUB_TOKEN", "GENERATE_DIFFS_TRIGGER", "COMMUNITY_CHECKER_TRIGGER"] + ["GITHUB_TOKEN_MAGIC_MODULES", "GENERATE_DIFFS_TRIGGER", "COMMUNITY_CHECKER_TRIGGER"] timeout: 8000s args: - "membership-checker" @@ -75,8 +75,8 @@ steps: availableSecrets: secretManager: - - versionName: projects/673497134629/secrets/github-magician-token/versions/latest - env: GITHUB_TOKEN + - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-magic-modules/versions/latest + env: GITHUB_TOKEN_MAGIC_MODULES - versionName: projects/673497134629/secrets/ci-trigger-generate-diffs/versions/latest env: GENERATE_DIFFS_TRIGGER - versionName: 
projects/673497134629/secrets/ci-trigger-community-checker/versions/latest diff --git a/.ci/gcb-generate-diffs-new.yml b/.ci/gcb-generate-diffs-new.yml index c19d5c5ebdd9..e775b5a37d39 100644 --- a/.ci/gcb-generate-diffs-new.yml +++ b/.ci/gcb-generate-diffs-new.yml @@ -72,7 +72,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tpg-head - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -86,7 +86,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tpg-base - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -99,7 +99,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] id: tpgb-head waitFor: ["build-magician-binary"] env: @@ -114,7 +114,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tpgb-base - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -128,7 +128,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tgc-head - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -142,7 +142,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tgc-base - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: 
["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -156,7 +156,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tf-oics-head - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -170,7 +170,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: tf-oics-base - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS"] waitFor: ["build-magician-binary"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -184,7 +184,7 @@ steps: - name: 'gcr.io/graphite-docker-images/go-plus' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' id: diff - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS", "GITHUB_TOKEN_MAGIC_MODULES"] args: - 'generate-comment' env: @@ -198,7 +198,7 @@ steps: id: tgc-test allowFailure: true entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_MAGIC_MODULES"] waitFor: ["tpgb-head", "tpgb-base", "tgc-head", "tgc-base"] args: - 'test-tgc' @@ -210,7 +210,7 @@ steps: id: tgc-test-integration entrypoint: '/workspace/.ci/scripts/go-plus/tgc-tester-integration/test_tgc_integration.sh' allowFailure: true - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_MAGIC_MODULES"] waitFor: ["tpgb-head", "tpgb-base", "tgc-head", "tgc-base"] env: - TEST_PROJECT=$_VALIDATOR_TEST_PROJECT @@ -229,7 +229,7 @@ steps: id: tpgb-test allowFailure: true entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_MAGIC_MODULES"] waitFor: ["tpgb-head", "tpgb-base"] args: - 'test-tpg' @@ -242,7 +242,7 @@ steps: id: tpg-test allowFailure: true entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: 
["GITHUB_TOKEN_MAGIC_MODULES"] waitFor: ["tpg-head", "tpg-base"] args: - 'test-tpg' @@ -254,7 +254,7 @@ steps: - name: 'gcr.io/graphite-docker-images/go-plus' id: gcb-tpg-vcr-test entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN", "GOOGLE_BILLING_ACCOUNT", "GOOGLE_CUST_ID", "GOOGLE_FIRESTORE_PROJECT", "GOOGLE_IDENTITY_USER", "GOOGLE_MASTER_BILLING_ACCOUNT", "GOOGLE_ORG", "GOOGLE_ORG_2", "GOOGLE_ORG_DOMAIN", "GOOGLE_PROJECT", "GOOGLE_PROJECT_NUMBER", "GOOGLE_SERVICE_ACCOUNT", "SA_KEY", "GOOGLE_PUBLIC_AVERTISED_PREFIX_DESCRIPTION", "GOOGLE_TPU_V2_VM_RUNTIME_VERSION"] + secretEnv: ["GITHUB_TOKEN_DOWNSTREAMS", "GITHUB_TOKEN_MAGIC_MODULES", "GOOGLE_BILLING_ACCOUNT", "GOOGLE_CUST_ID", "GOOGLE_FIRESTORE_PROJECT", "GOOGLE_IDENTITY_USER", "GOOGLE_MASTER_BILLING_ACCOUNT", "GOOGLE_ORG", "GOOGLE_ORG_2", "GOOGLE_ORG_DOMAIN", "GOOGLE_PROJECT", "GOOGLE_PROJECT_NUMBER", "GOOGLE_SERVICE_ACCOUNT", "SA_KEY", "GOOGLE_PUBLIC_AVERTISED_PREFIX_DESCRIPTION", "GOOGLE_TPU_V2_VM_RUNTIME_VERSION"] waitFor: ["diff"] env: - BASE_BRANCH=$_BASE_BRANCH @@ -271,7 +271,7 @@ steps: - name: 'gcr.io/graphite-docker-images/go-plus' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_MAGIC_MODULES"] waitFor: ["diff"] args: - 'request-service-reviewers' @@ -284,8 +284,10 @@ options: availableSecrets: secretManager: - - versionName: projects/673497134629/secrets/github-magician-token/versions/latest - env: GITHUB_TOKEN + - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-downstreams/versions/latest + env: GITHUB_TOKEN_DOWNSTREAMS + - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-magic-modules/versions/latest + env: GITHUB_TOKEN_MAGIC_MODULES - versionName: projects/673497134629/secrets/ci-test-billing-account/versions/latest env: GOOGLE_BILLING_ACCOUNT - versionName: projects/673497134629/secrets/ci-test-cust-id/versions/latest diff 
--git a/.ci/gcb-push-downstream.yml b/.ci/gcb-push-downstream.yml index 0f325a15a17c..9b0b58493ab5 100644 --- a/.ci/gcb-push-downstream.yml +++ b/.ci/gcb-push-downstream.yml @@ -33,7 +33,6 @@ steps: - name: 'gcr.io/graphite-docker-images/bash-plus' entrypoint: '/workspace/.ci/scripts/bash-plus/downstream-waiter/wait_for_commit.sh' id: tpg-sync - secretEnv: ["GITHUB_TOKEN"] waitFor: ["checkout"] args: - 'tpg-sync' @@ -42,7 +41,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] id: tpg-push waitFor: ["tpg-sync", "build-magician-binary"] env: @@ -56,22 +55,21 @@ steps: - name: 'gcr.io/cloud-builders/git' waitFor: ["tpg-push"] - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] entrypoint: 'bash' args: - -c - | if [ "$BRANCH_NAME" == "main" ]; then - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpg-sync + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpg-sync else - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpg-sync-$BRANCH_NAME + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpg-sync-$BRANCH_NAME fi # TPGB - name: 'gcr.io/graphite-docker-images/bash-plus' entrypoint: '/workspace/.ci/scripts/bash-plus/downstream-waiter/wait_for_commit.sh' id: tpgb-sync - secretEnv: ["GITHUB_TOKEN"] waitFor: ["checkout"] args: - 'tpgb-sync' @@ -80,7 +78,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] id: tpgb-push waitFor: ["tpgb-sync", "build-magician-binary"] env: @@ -94,22 +92,21 @@ steps: - name: 
'gcr.io/cloud-builders/git' waitFor: ["tpgb-push"] - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] entrypoint: 'bash' args: - -c - | if [ "$BRANCH_NAME" == "main" ]; then - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpgb-sync + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpgb-sync else - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpgb-sync-$BRANCH_NAME + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tpgb-sync-$BRANCH_NAME fi # TGC - name: 'gcr.io/graphite-docker-images/bash-plus' entrypoint: '/workspace/.ci/scripts/bash-plus/downstream-waiter/wait_for_commit.sh' id: tgc-sync - secretEnv: ["GITHUB_TOKEN"] waitFor: ["checkout"] args: - 'tgc-sync' @@ -118,7 +115,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] id: tgc-push waitFor: ["tgc-sync", "tpgb-push"] env: @@ -132,22 +129,21 @@ steps: - name: 'gcr.io/cloud-builders/git' waitFor: ["tgc-push"] - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] entrypoint: 'bash' args: - -c - | if [ "$BRANCH_NAME" == "main" ]; then - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tgc-sync + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tgc-sync else - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tgc-sync-$BRANCH_NAME + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tgc-sync-$BRANCH_NAME fi # TF-OICS - name: 
'gcr.io/graphite-docker-images/bash-plus' entrypoint: '/workspace/.ci/scripts/bash-plus/downstream-waiter/wait_for_commit.sh' id: tf-oics-sync - secretEnv: ["GITHUB_TOKEN"] waitFor: ["checkout"] args: - 'tf-oics-sync' @@ -156,7 +152,7 @@ steps: - name: 'gcr.io/graphite-docker-images/build-environment' entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] id: tf-oics-push waitFor: ["tf-oics-sync", "build-magician-binary"] env: @@ -170,20 +166,20 @@ steps: - name: 'gcr.io/cloud-builders/git' waitFor: ["tf-oics-push"] - secretEnv: ["GITHUB_TOKEN"] + secretEnv: ["GITHUB_TOKEN_CLASSIC"] entrypoint: 'bash' args: - -c - | if [ "$BRANCH_NAME" == "main" ]; then - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tf-oics-sync + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tf-oics-sync else - git push https://modular-magician:$$GITHUB_TOKEN@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tf-oics-sync-$BRANCH_NAME + git push https://modular-magician:$$GITHUB_TOKEN_CLASSIC@github.com/GoogleCloudPlatform/magic-modules $COMMIT_SHA:tf-oics-sync-$BRANCH_NAME fi - name: 'gcr.io/graphite-docker-images/go-plus' entrypoint: '/workspace/.ci/scripts/go-plus/vcr-cassette-merger/vcr_merge.sh' - secretEnv: ["GITHUB_TOKEN", "GOOGLE_PROJECT"] + secretEnv: ["GITHUB_TOKEN_CLASSIC", "GOOGLE_PROJECT"] id: vcr-merge waitFor: ["tpg-push"] env: @@ -196,7 +192,7 @@ steps: waitFor: ["vcr-merge"] entrypoint: '/workspace/.ci/scripts/go-plus/magician/exec.sh' secretEnv: - - "GITHUB_TOKEN" + - "GITHUB_TOKEN_DOWNSTREAMS" - "GOOGLE_BILLING_ACCOUNT" - "GOOGLE_CUST_ID" - "GOOGLE_FIRESTORE_PROJECT" @@ -228,7 +224,9 @@ logsBucket: 'gs://cloudbuild-downstream-builder-logs' availableSecrets: secretManager: - versionName: projects/673497134629/secrets/github-classic--repo-workflow/versions/latest - env: 
GITHUB_TOKEN + env: GITHUB_TOKEN_CLASSIC + - versionName: projects/673497134629/secrets/github-magician-token-generate-diffs-downstreams/versions/latest + env: GITHUB_TOKEN_DOWNSTREAMS - versionName: projects/673497134629/secrets/ci-test-billing-account/versions/latest env: GOOGLE_BILLING_ACCOUNT - versionName: projects/673497134629/secrets/ci-test-cust-id/versions/latest diff --git a/.ci/gcb-vcr-nightly.yml b/.ci/gcb-vcr-nightly.yml index fb3bbf8663c3..3664d6312d34 100644 --- a/.ci/gcb-vcr-nightly.yml +++ b/.ci/gcb-vcr-nightly.yml @@ -42,4 +42,4 @@ availableSecrets: - versionName: projects/673497134629/secrets/ci-test-public-advertised-prefix-description/versions/latest env: GOOGLE_PUBLIC_AVERTISED_PREFIX_DESCRIPTION - versionName: projects/673497134629/secrets/ci-test-tpu-v2-vm-runtime-version/versions/latest - env: GOOGLE_TPU_V2_VM_RUNTIME_VERSION \ No newline at end of file + env: GOOGLE_TPU_V2_VM_RUNTIME_VERSION diff --git a/.ci/infra/terraform/main.tf b/.ci/infra/terraform/main.tf index 7c1fc897bfa3..439d37f5e8b3 100644 --- a/.ci/infra/terraform/main.tf +++ b/.ci/infra/terraform/main.tf @@ -168,6 +168,7 @@ module "project-services" { "apikeys.googleapis.com", "appengine.googleapis.com", "appengineflex.googleapis.com", + "apphub.googleapis.com", "artifactregistry.googleapis.com", "assuredworkloads.googleapis.com", "autoscaling.googleapis.com", @@ -196,6 +197,7 @@ module "project-services" { "cloudidentity.googleapis.com", "cloudiot.googleapis.com", "cloudkms.googleapis.com", + "cloudquotas.googleapis.com", "cloudresourcemanager.googleapis.com", "cloudscheduler.googleapis.com", "cloudtasks.googleapis.com", @@ -222,6 +224,7 @@ module "project-services" { "datastream.googleapis.com", "deploymentmanager.googleapis.com", "dialogflow.googleapis.com", + "discoveryengine.googleapis.com", "dlp.googleapis.com", "dns.googleapis.com", "documentai.googleapis.com", diff --git a/.ci/magician/cmd/check_cassettes.go b/.ci/magician/cmd/check_cassettes.go index 
321bb83eda69..d31cebe44ddd 100644 --- a/.ci/magician/cmd/check_cassettes.go +++ b/.ci/magician/cmd/check_cassettes.go @@ -13,7 +13,6 @@ import ( var ccEnvironmentVariables = [...]string{ "COMMIT_SHA", - "GITHUB_TOKEN", "GOCACHE", "GOPATH", "GOOGLE_BILLING_ACCOUNT", @@ -56,13 +55,19 @@ var checkCassettesCmd = &cobra.Command{ env[ev] = val } + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_DOWNSTREAMS") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN_DOWNSTREAMS or GITHUB_TOKEN environment variables") + os.Exit(1) + } + rnr, err := exec.NewRunner() if err != nil { fmt.Println("Error creating Runner: ", err) os.Exit(1) } - ctlr := source.NewController(env["GOPATH"], "modular-magician", env["GITHUB_TOKEN"], rnr) + ctlr := source.NewController(env["GOPATH"], "modular-magician", githubToken, rnr) vt, err := vcr.NewTester(env, rnr) if err != nil { @@ -73,6 +78,14 @@ var checkCassettesCmd = &cobra.Command{ }, } +func lookupGithubTokenOrFallback(tokenName string) (string, bool) { + val, ok := os.LookupEnv(tokenName) + if !ok { + return os.LookupEnv("GITHUB_TOKEN") + } + return val, ok +} + func listCCEnvironmentVariables() string { var result string for i, ev := range ccEnvironmentVariables { diff --git a/.ci/magician/cmd/community_checker.go b/.ci/magician/cmd/community_checker.go index f968ca74abff..9eb82a5e3e2e 100644 --- a/.ci/magician/cmd/community_checker.go +++ b/.ci/magician/cmd/community_checker.go @@ -64,7 +64,12 @@ var communityApprovalCmd = &cobra.Command{ baseBranch := args[5] fmt.Println("Base Branch: ", baseBranch) - gh := github.NewClient() + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_MAGIC_MODULES") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN_MAGIC_MODULES or GITHUB_TOKEN environment variables") + os.Exit(1) + } + gh := github.NewClient(githubToken) cb := cloudbuild.NewClient() execCommunityChecker(prNumber, commitSha, branchName, headRepoUrl, headBranch, baseBranch, gh, cb) }, diff --git 
a/.ci/magician/cmd/generate_comment.go b/.ci/magician/cmd/generate_comment.go index f7ad2e5a8b0a..b78bbf35fd3c 100644 --- a/.ci/magician/cmd/generate_comment.go +++ b/.ci/magician/cmd/generate_comment.go @@ -35,7 +35,6 @@ var gcEnvironmentVariables = [...]string{ "BUILD_ID", "BUILD_STEP", "COMMIT_SHA", - "GITHUB_TOKEN", "GOPATH", "HOME", "PATH", @@ -70,13 +69,21 @@ var generateCommentCmd = &cobra.Command{ env[ev] = val } - gh := github.NewClient() + for _, tokenName := range []string{"GITHUB_TOKEN_DOWNSTREAMS", "GITHUB_TOKEN_MAGIC_MODULES"} { + val, ok := lookupGithubTokenOrFallback(tokenName) + if !ok { + fmt.Printf("Did not provide %s or GITHUB_TOKEN environment variable\n", tokenName) + os.Exit(1) + } + env[tokenName] = val + } + gh := github.NewClient(env["GITHUB_TOKEN_MAGIC_MODULES"]) rnr, err := exec.NewRunner() if err != nil { fmt.Println("Error creating a runner: ", err) os.Exit(1) } - ctlr := source.NewController(filepath.Join("workspace", "go"), "modular-magician", env["GITHUB_TOKEN"], rnr) + ctlr := source.NewController(filepath.Join("workspace", "go"), "modular-magician", env["GITHUB_TOKEN_DOWNSTREAMS"], rnr) execGenerateComment(env, gh, rnr, ctlr) }, } diff --git a/.ci/magician/cmd/generate_comment_test.go b/.ci/magician/cmd/generate_comment_test.go index 46f90886f180..c8a1c14ed487 100644 --- a/.ci/magician/cmd/generate_comment_test.go +++ b/.ci/magician/cmd/generate_comment_test.go @@ -28,22 +28,22 @@ func TestExecGenerateComment(t *testing.T) { } ctlr := source.NewController("/mock/dir/go", "modular-magician", "*******", mr) env := map[string]string{ - "BUILD_ID": "build1", - "BUILD_STEP": "17", - "COMMIT_SHA": "sha1", - "GITHUB_TOKEN": "*******", - "PR_NUMBER": "pr1", - "PROJECT_ID": "project1", + "BUILD_ID": "build1", + "BUILD_STEP": "17", + "COMMIT_SHA": "sha1", + "GITHUB_TOKEN_MAGIC_MODULES": "*******", + "PR_NUMBER": "pr1", + "PROJECT_ID": "project1", } diffProcessorEnv := map[string]string{ - "BUILD_ID": "build1", - "BUILD_STEP": "17", - 
"COMMIT_SHA": "sha1", - "GITHUB_TOKEN": "*******", - "NEW_REF": "auto-pr-pr1", - "OLD_REF": "auto-pr-pr1-old", - "PR_NUMBER": "pr1", - "PROJECT_ID": "project1", + "BUILD_ID": "build1", + "BUILD_STEP": "17", + "COMMIT_SHA": "sha1", + "GITHUB_TOKEN_MAGIC_MODULES": "*******", + "NEW_REF": "auto-pr-pr1", + "OLD_REF": "auto-pr-pr1-old", + "PR_NUMBER": "pr1", + "PROJECT_ID": "project1", } execGenerateComment(env, gh, mr, ctlr) diff --git a/.ci/magician/cmd/generate_downstream.go b/.ci/magician/cmd/generate_downstream.go index 033563bda007..2e812d482069 100644 --- a/.ci/magician/cmd/generate_downstream.go +++ b/.ci/magician/cmd/generate_downstream.go @@ -19,10 +19,15 @@ var changelogExp = regexp.MustCompile("(?s)```release-note.*?```") var gdEnvironmentVariables = [...]string{ "BASE_BRANCH", - "GITHUB_TOKEN", "GOPATH", } +var gdTokenEnvironmentVariables = [...]string{ + "GITHUB_TOKEN_CLASSIC", + "GITHUB_TOKEN_DOWNSTREAMS", + "GITHUB_TOKEN", +} + var generateDownstreamCmd = &cobra.Command{ Use: "generate-downstream", Short: "Run generate downstream", @@ -47,13 +52,34 @@ var generateDownstreamCmd = &cobra.Command{ env[ev] = val } - gh := github.NewClient() + var githubToken string + for _, ev := range gdTokenEnvironmentVariables { + val, ok := os.LookupEnv(ev) + if ok { + env[ev] = val + githubToken = val + break + } + } + + gh := github.NewClient(githubToken) rnr, err := exec.NewRunner() if err != nil { fmt.Println("Error creating a runner: ", err) os.Exit(1) } - ctlr := source.NewController(env["GOPATH"], "modular-magician", env["GITHUB_TOKEN"], rnr) + ctlr := source.NewController(env["GOPATH"], "modular-magician", githubToken, rnr) + oldToken := os.Getenv("GITHUB_TOKEN") + if err := os.Setenv("GITHUB_TOKEN", githubToken); err != nil { + fmt.Println("Error setting GITHUB_TOKEN environment variable: ", err) + os.Exit(1) + } + defer func() { + if err := os.Setenv("GITHUB_TOKEN", oldToken); err != nil { + fmt.Println("Error setting GITHUB_TOKEN environment variable: ", err) 
+ os.Exit(1) + } + }() if len(args) != 4 { fmt.Printf("Wrong number of arguments %d, expected 4\n", len(args)) diff --git a/.ci/magician/cmd/membership_checker.go b/.ci/magician/cmd/membership_checker.go index 4af9c95b81bc..97210105dd08 100644 --- a/.ci/magician/cmd/membership_checker.go +++ b/.ci/magician/cmd/membership_checker.go @@ -72,7 +72,12 @@ var membershipCheckerCmd = &cobra.Command{ baseBranch := args[5] fmt.Println("Base Branch: ", baseBranch) - gh := github.NewClient() + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_MAGIC_MODULES") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN_MAGIC_MODULES or GITHUB_TOKEN environment variables") + os.Exit(1) + } + gh := github.NewClient(githubToken) cb := cloudbuild.NewClient() execMembershipChecker(prNumber, commitSha, branchName, headRepoUrl, headBranch, baseBranch, gh, cb) }, diff --git a/.ci/magician/cmd/request_reviewer.go b/.ci/magician/cmd/request_reviewer.go index b5deed4e828e..8ed8661a4e61 100644 --- a/.ci/magician/cmd/request_reviewer.go +++ b/.ci/magician/cmd/request_reviewer.go @@ -48,7 +48,12 @@ var requestReviewerCmd = &cobra.Command{ Run: func(cmd *cobra.Command, args []string) { prNumber := args[0] fmt.Println("PR Number: ", prNumber) - gh := github.NewClient() + githubToken, ok := os.LookupEnv("GITHUB_TOKEN") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN environment variable") + os.Exit(1) + } + gh := github.NewClient(githubToken) execRequestReviewer(prNumber, gh) }, } diff --git a/.ci/magician/cmd/request_service_reviewers.go b/.ci/magician/cmd/request_service_reviewers.go index d1efff417ac5..6ef80c5283c0 100644 --- a/.ci/magician/cmd/request_service_reviewers.go +++ b/.ci/magician/cmd/request_service_reviewers.go @@ -40,7 +40,12 @@ var requestServiceReviewersCmd = &cobra.Command{ prNumber := args[0] fmt.Println("PR Number: ", prNumber) - gh := github.NewClient() + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_MAGIC_MODULES") + if !ok { + 
fmt.Println("Did not provide GITHUB_TOKEN_MAGIC_MODULES or GITHUB_TOKEN environment variable") + os.Exit(1) + } + gh := github.NewClient(githubToken) execRequestServiceReviewers(prNumber, gh, labeler.EnrolledTeamsYaml) }, } diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 2749ddd586c0..60fda5577d82 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -15,7 +15,6 @@ import ( ) var ttvEnvironmentVariables = [...]string{ - "GITHUB_TOKEN", "GOCACHE", "GOPATH", "GOOGLE_BILLING_ACCOUNT", @@ -54,18 +53,27 @@ var testTerraformVCRCmd = &cobra.Command{ env[ev] = val } + for _, tokenName := range []string{"GITHUB_TOKEN_DOWNSTREAMS", "GITHUB_TOKEN_MAGIC_MODULES"} { + val, ok := lookupGithubTokenOrFallback(tokenName) + if !ok { + fmt.Printf("Did not provide %s or GITHUB_TOKEN environment variable\n", tokenName) + os.Exit(1) + } + env[tokenName] = val + } + baseBranch := os.Getenv("BASE_BRANCH") if baseBranch == "" { baseBranch = "main" } - gh := github.NewClient() + gh := github.NewClient(env["GITHUB_TOKEN_MAGIC_MODULES"]) rnr, err := exec.NewRunner() if err != nil { fmt.Println("Error creating a runner: ", err) os.Exit(1) } - ctlr := source.NewController(env["GOPATH"], "modular-magician", env["GITHUB_TOKEN"], rnr) + ctlr := source.NewController(env["GOPATH"], "modular-magician", env["GITHUB_TOKEN_DOWNSTREAMS"], rnr) vt, err := vcr.NewTester(env, rnr) if err != nil { diff --git a/.ci/magician/cmd/test_tgc.go b/.ci/magician/cmd/test_tgc.go index 9f0edf2f29bf..5f000d731b15 100644 --- a/.ci/magician/cmd/test_tgc.go +++ b/.ci/magician/cmd/test_tgc.go @@ -36,7 +36,12 @@ var testTGCCmd = &cobra.Command{ commit := os.Getenv("COMMIT_SHA") pr := os.Getenv("PR_NUMBER") - gh := github.NewClient() + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_MAGIC_MODULES") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN_MAGIC_MODULES or GITHUB_TOKEN environment variables") + os.Exit(1) 
+ } + gh := github.NewClient(githubToken) execTestTGC(commit, pr, gh) }, diff --git a/.ci/magician/cmd/test_tpg.go b/.ci/magician/cmd/test_tpg.go index c8923ad42106..260b5b2a7466 100644 --- a/.ci/magician/cmd/test_tpg.go +++ b/.ci/magician/cmd/test_tpg.go @@ -42,7 +42,12 @@ var testTPGCmd = &cobra.Command{ commit := os.Getenv("COMMIT_SHA") pr := os.Getenv("PR_NUMBER") - gh := github.NewClient() + githubToken, ok := lookupGithubTokenOrFallback("GITHUB_TOKEN_MAGIC_MODULES") + if !ok { + fmt.Println("Did not provide GITHUB_TOKEN_MAGIC_MODULES or GITHUB_TOKEN environment variables") + os.Exit(1) + } + gh := github.NewClient(githubToken) execTestTPG(version, commit, pr, gh) }, diff --git a/.ci/magician/github/init.go b/.ci/magician/github/init.go index a931ad5efca5..d64995aa46b7 100644 --- a/.ci/magician/github/init.go +++ b/.ci/magician/github/init.go @@ -15,22 +15,13 @@ */ package github -import ( - "fmt" - "os" -) - // Client for GitHub interactions. type Client struct { token string } -func NewClient() *Client { - githubToken, ok := os.LookupEnv("GITHUB_TOKEN") - if !ok { - fmt.Println("Did not provide GITHUB_TOKEN environment variable") - os.Exit(1) +func NewClient(token string) *Client { + return &Client{ + token: token, } - - return &Client{token: githubToken} } diff --git a/.ci/magician/github/membership.go b/.ci/magician/github/membership.go index fd151935d98b..908970d1b08f 100644 --- a/.ci/magician/github/membership.go +++ b/.ci/magician/github/membership.go @@ -46,11 +46,7 @@ var ( trustedContributors = []string{} // This is for reviewers who are "on vacation": will not receive new review assignments but will still receive re-requests for assigned PRs. 
- onVacationReviewers = []string{ - "zli82016", - "NickElliot", - "ScottSuarez", - } + onVacationReviewers = []string{} ) type UserType int64 diff --git a/.ci/magician/vcr/tester.go b/.ci/magician/vcr/tester.go index 592cf1925a9b..46e4a36473c1 100644 --- a/.ci/magician/vcr/tester.go +++ b/.ci/magician/vcr/tester.go @@ -199,7 +199,7 @@ func (vt *Tester) Run(mode Mode, version provider.Version, testDirs []string) (* } var printedEnv string for ev, val := range env { - if ev == "SA_KEY" || ev == "GITHUB_TOKEN" { + if ev == "SA_KEY" || strings.HasPrefix(ev, "GITHUB_TOKEN") { val = "{hidden}" } printedEnv += fmt.Sprintf("%s=%s\n", ev, val) @@ -223,12 +223,13 @@ func (vt *Tester) Run(mode Mode, version provider.Version, testDirs []string) (* logFileName := filepath.Join(vt.baseDir, "testlogs", fmt.Sprintf("%s_test.log", mode.Lower())) // Write output (or error) to test log. // Append to existing log file. - previousLog, _ := vt.rnr.ReadFile(logFileName) - if previousLog != "" { - output = previousLog + "\n" + output + allOutput, _ := vt.rnr.ReadFile(logFileName) + if allOutput != "" { + allOutput += "\n" } - if err := vt.rnr.WriteFile(logFileName, output); err != nil { - return nil, fmt.Errorf("error writing log: %v, test output: %v", err, output) + allOutput += output + if err := vt.rnr.WriteFile(logFileName, allOutput); err != nil { + return nil, fmt.Errorf("error writing log: %v, test output: %v", err, allOutput) } return collectResult(output), testErr } @@ -319,7 +320,7 @@ func (vt *Tester) runInParallel(mode Mode, version provider.Version, testDir, te "-parallel", "1", "-v", - "-run=" + test, + "-run=" + test + "$", "-timeout", replayingTimeout, "-ldflags=-X=github.com/hashicorp/terraform-provider-google-beta/version.ProviderVersion=acc", @@ -473,19 +474,26 @@ func (vt *Tester) printLogs(logPath string) { func collectResult(output string) *Result { matches := testResultsExpression.FindAllStringSubmatch(output, -1) - results := make(map[string][]string, 4) + 
resultSets := make(map[string]map[string]struct{}, 4) for _, submatches := range matches { if len(submatches) != 3 { fmt.Printf("Warning: unexpected regex match found in test output: %v", submatches) continue } - results[submatches[1]] = append(results[submatches[1]], submatches[2]) + if _, ok := resultSets[submatches[1]]; !ok { + resultSets[submatches[1]] = make(map[string]struct{}) + } + resultSets[submatches[1]][submatches[2]] = struct{}{} } + results := make(map[string][]string, 4) results["PANIC"] = testPanicExpression.FindAllString(output, -1) - sort.Strings(results["FAIL"]) - sort.Strings(results["PASS"]) - sort.Strings(results["SKIP"]) sort.Strings(results["PANIC"]) + for _, kind := range []string{"FAIL", "PASS", "SKIP"} { + for test := range resultSets[kind] { + results[kind] = append(results[kind], test) + } + sort.Strings(results[kind]) + } return &Result{ FailedTests: results["FAIL"], PassedTests: results["PASS"], diff --git a/.ci/scripts/go-plus/tgc-tester-integration/test_tgc_integration.sh b/.ci/scripts/go-plus/tgc-tester-integration/test_tgc_integration.sh index a17d7099ff44..be80ee454f1d 100755 --- a/.ci/scripts/go-plus/tgc-tester-integration/test_tgc_integration.sh +++ b/.ci/scripts/go-plus/tgc-tester-integration/test_tgc_integration.sh @@ -12,7 +12,7 @@ github_username=modular-magician new_branch="auto-pr-$pr_number" -git_remote=https://$github_username:$GITHUB_TOKEN@github.com/$github_username/$gh_repo +git_remote=https://github.com/$github_username/$gh_repo local_path=$GOPATH/src/github.com/GoogleCloudPlatform/$gh_repo mkdir -p "$(dirname $local_path)" git clone $git_remote $local_path --branch $new_branch --depth 2 @@ -36,9 +36,14 @@ post_body=$( jq -n \ --arg state "pending" \ '{context: $context, target_url: $target_url, state: $state}') +# Fall back to old github token if new token is unavailable. 
+if [[ -z $GITHUB_TOKEN_MAGIC_MODULES ]]; then + GITHUB_TOKEN_MAGIC_MODULES=$GITHUB_TOKEN +fi + curl \ -X POST \ - -u "$github_username:$GITHUB_TOKEN" \ + -u "$github_username:$GITHUB_TOKEN_MAGIC_MODULES" \ -H "Accept: application/vnd.github.v3+json" \ "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/$mm_commit_sha" \ -d "$post_body" @@ -68,7 +73,7 @@ post_body=$( jq -n \ curl \ -X POST \ - -u "$github_username:$GITHUB_TOKEN" \ + -u "$github_username:$GITHUB_TOKEN_MAGIC_MODULES" \ -H "Accept: application/vnd.github.v3+json" \ "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/$mm_commit_sha" \ -d "$post_body" diff --git a/.ci/scripts/go-plus/vcr-cassette-merger/vcr_merge.sh b/.ci/scripts/go-plus/vcr-cassette-merger/vcr_merge.sh index 93d7be0a77a3..7440af55c075 100755 --- a/.ci/scripts/go-plus/vcr-cassette-merger/vcr_merge.sh +++ b/.ci/scripts/go-plus/vcr-cassette-merger/vcr_merge.sh @@ -11,7 +11,7 @@ else echo "BASE_BRANCH: $BASE_BRANCH" fi -PR_NUMBER=$(curl -s -H "Authorization: token ${GITHUB_TOKEN}" \ +PR_NUMBER=$(curl -s -H "Authorization: token ${GITHUB_TOKEN_CLASSIC}" \ "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/pulls?state=closed&base=$BASE_BRANCH&sort=updated&direction=desc" | \ jq -r ".[] | if .merge_commit_sha == \"$REFERENCE\" then .number else empty end") @@ -40,4 +40,4 @@ if [ $? 
-eq 0 ]; then fi -set -e \ No newline at end of file +set -e diff --git a/.github/workflows/build-downstream.yml b/.github/workflows/build-downstream.yml index f8446388ee65..91f33499bd10 100644 --- a/.github/workflows/build-downstream.yml +++ b/.github/workflows/build-downstream.yml @@ -21,17 +21,17 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} steps: - name: Checkout Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 - name: Set up Ruby - uses: ruby/setup-ruby@ec02537da5712d66d4d50a0f33b7eb52773b5ed1 + uses: ruby/setup-ruby@036ef458ddccddb148a2b9fb67e95a22fdbf728b # v1.160.0 with: ruby-version: '3.1' - name: Cache Bundler gems - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: mmv1/vendor/bundle key: ${{ runner.os }}-gems-${{ hashFiles('mmv1/**/Gemfile.lock') }} @@ -45,13 +45,13 @@ jobs: working-directory: mmv1 - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: '^1.20' # Cache Go modules - name: Cache Go modules - uses: actions/cache@v2 + uses: actions/cache@v3 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} @@ -108,7 +108,7 @@ jobs: (current_dir=$(pwd) && cd $OUTPUT_PATH && zip -r "$current_dir/output.zip" .) 
- name: Upload built artifacts - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@3cea5372237819ed00197afe530f5a7ea3e805c8 # v3.1.0 with: name: artifact-${{ inputs.repo }} path: output.zip \ No newline at end of file diff --git a/.github/workflows/changelog-checker.yml b/.github/workflows/changelog-checker.yml index 3158329b9c84..4e546f0e05e5 100644 --- a/.github/workflows/changelog-checker.yml +++ b/.github/workflows/changelog-checker.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: repo - name: Check Changelog diff --git a/.github/workflows/magic-modules.yml b/.github/workflows/magic-modules.yml index 1a2c88510fa2..d92e666e2b60 100644 --- a/.github/workflows/magic-modules.yml +++ b/.github/workflows/magic-modules.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: repo fetch-depth: 0 @@ -36,7 +36,7 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: repo fetch-depth: 2 @@ -49,7 +49,7 @@ jobs: git fetch origin ${{ github.base_ref }} # Fetch the base branch git merge --no-ff origin/${{ github.base_ref }} # Merge with the base branch - name: Set up Ruby - uses: ruby/setup-ruby@ec02537da5712d66d4d50a0f33b7eb52773b5ed1 + uses: ruby/setup-ruby@036ef458ddccddb148a2b9fb67e95a22fdbf728b # v1.160.0 with: ruby-version: '3.1' - name: Install dependencies diff --git a/.github/workflows/membership-checker.yml b/.github/workflows/membership-checker.yml index 2612f2c61679..7a206b519eaa 100644 --- a/.github/workflows/membership-checker.yml +++ b/.github/workflows/membership-checker.yml @@ -11,7 +11,7 @@ jobs: build-and-unit-tests: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Go uses: actions/setup-go@v4 with: diff --git 
a/.github/workflows/repository-documentation.yml b/.github/workflows/repository-documentation-deploy.yml similarity index 78% rename from .github/workflows/repository-documentation.yml rename to .github/workflows/repository-documentation-deploy.yml index 6ff5b1bbf966..f94df4ce4ffd 100644 --- a/.github/workflows/repository-documentation.yml +++ b/.github/workflows/repository-documentation-deploy.yml @@ -1,18 +1,19 @@ -name: repository-documentation +name: repository-documentation-deploy + +permissions: read-all on: push: branches: - - main # Set a branch to deploy - pull_request: - paths: - - 'docs/**' + - main jobs: deploy: runs-on: ubuntu-22.04 + permissions: + contents: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: true # Fetch Hugo themes (true OR recursive) fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod @@ -29,7 +30,6 @@ jobs: - name: Deploy uses: peaceiris/actions-gh-pages@de7ea6f8efb354206b205ef54722213d99067935 # v3.9.0 - if: github.ref == 'refs/heads/main' with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./docs/public diff --git a/.github/workflows/repository-documentation-test.yml b/.github/workflows/repository-documentation-test.yml new file mode 100644 index 000000000000..f2952926ef2a --- /dev/null +++ b/.github/workflows/repository-documentation-test.yml @@ -0,0 +1,27 @@ +name: repository-documentation-test + +permissions: read-all + +on: + pull_request: + paths: + - 'docs/**' + +jobs: + deploy: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + submodules: true # Fetch Hugo themes (true OR recursive) + fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod + + - name: Setup Hugo + uses: peaceiris/actions-hugo@16361eb4acea8698b220b76c0d4e84e1fd22c61d # v2.6.0 + with: + hugo-version: '0.115.0' + extended: true + + - name: Build + working-directory: ./docs + run: hugo --minify diff --git a/.github/workflows/teamcity-services-diff-check-weekly.yml 
b/.github/workflows/teamcity-services-diff-check-weekly.yml new file mode 100644 index 000000000000..5d857f0eda90 --- /dev/null +++ b/.github/workflows/teamcity-services-diff-check-weekly.yml @@ -0,0 +1,67 @@ +name: TeamCity Services Weekly Diff Check +permissions: read-all + +on: + # Enable ad hoc checks + workflow_dispatch: + + # Scheduled checks to catch edge cases + schedule: + # Every Tuesday morning + - cron: '0 4 * * 2' + +jobs: + terraform-provider-google: + uses: ./.github/workflows/build-downstream.yml + with: + repo: 'terraform-provider-google' + + terraform-provider-google-beta: + uses: ./.github/workflows/build-downstream.yml + with: + repo: 'terraform-provider-google-beta' + + teamcity-services-diff-check: + needs: [terraform-provider-google, terraform-provider-google-beta] + runs-on: ubuntu-22.04 + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: '^1.20' + + - name: Download built artifacts - GA provider + uses: actions/download-artifact@v2 + with: + name: artifact-terraform-provider-google + path: artifacts + + - name: Unzip the artifacts and delete the zip + run: | + unzip -o artifacts/output.zip -d ./provider + rm artifacts/output.zip + + - name: Download built artifacts - Beta provider + uses: actions/download-artifact@v2 + with: + name: artifact-terraform-provider-google-beta + path: artifacts + + - name: Unzip the artifacts and delete the zip + run: | + unzip -o artifacts/output.zip -d ./provider + rm artifacts/output.zip + + - name: Check that new services have been added to the TeamCity configuration code + run: | + # Create lists of service packages in providers + ls provider/google/services > tools/teamcity-diff-check/services_ga.txt + ls provider/google-beta/services > tools/teamcity-diff-check/services_beta.txt + + # Run tool to compare service packages in the providers vs those listed in TeamCity config files + cd tools/teamcity-diff-check + go run 
main.go -service_file=services_ga + go run main.go -service_file=services_beta diff --git a/.github/workflows/teamcity-services-diff-check.yml b/.github/workflows/teamcity-services-diff-check.yml new file mode 100644 index 000000000000..37dd8e218554 --- /dev/null +++ b/.github/workflows/teamcity-services-diff-check.yml @@ -0,0 +1,86 @@ +name: TeamCity Services Diff Check +permissions: read-all + +on: + pull_request: + paths: + - '.github/workflows/teamcity-services-diff-check.yml' + - 'mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt' + - 'mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt' + - 'mmv1/products/**' +jobs: + check-pr: + runs-on: ubuntu-22.04 + outputs: + services: ${{steps.services.outputs.services}} + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: "Check for New Services" + id: services + run: | + newServices=$(($(git diff --name-only --diff-filter=A origin/main HEAD | grep -P "mmv1/products/.*/product.yaml" | wc -l))) + echo "services=$newServices" >> "${GITHUB_OUTPUT}" + if [ "$newServices" = "0" ];then + echo "No new service found." 
+ fi + terraform-provider-google: + if: ${{needs.check-pr.outputs.services != '0'}} + needs: check-pr + uses: ./.github/workflows/build-downstream.yml + with: + repo: 'terraform-provider-google' + + terraform-provider-google-beta: + if: ${{needs.check-pr.outputs.services != '0'}} + needs: check-pr + uses: ./.github/workflows/build-downstream.yml + with: + repo: 'terraform-provider-google-beta' + + teamcity-services-diff-check: + needs: [terraform-provider-google, terraform-provider-google-beta] + runs-on: ubuntu-22.04 + steps: + - name: Checkout Repository + uses: actions/checkout@v4 + + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: '^1.20' + + - name: Download built artifacts - GA provider + uses: actions/download-artifact@v2 + with: + name: artifact-terraform-provider-google + path: artifacts + + - name: Unzip the artifacts and delete the zip + run: | + unzip -o artifacts/output.zip -d ./provider + rm artifacts/output.zip + + - name: Download built artifacts - Beta provider + uses: actions/download-artifact@v2 + with: + name: artifact-terraform-provider-google-beta + path: artifacts + + - name: Unzip the artifacts and delete the zip + run: | + unzip -o artifacts/output.zip -d ./provider + rm artifacts/output.zip + + - name: Check that new services have been added to the TeamCity configuration code + run: | + # Create lists of service packages in providers + ls provider/google/services > tools/teamcity-diff-check/services_ga.txt + ls provider/google-beta/services > tools/teamcity-diff-check/services_beta.txt + + # Run tool to compare service packages in the providers vs those listed in TeamCity config files + cd tools/teamcity-diff-check + go run main.go -service_file=services_ga + go run main.go -service_file=services_beta diff --git a/.github/workflows/test-tgc.yml b/.github/workflows/test-tgc.yml index 1546a5c856ee..190bb6ae68c0 100644 --- a/.github/workflows/test-tgc.yml +++ b/.github/workflows/test-tgc.yml @@ -1,9 +1,6 @@ name: TGC Build 
and Unit Test -permissions: - actions: read - contents: read - statuses: write +permissions: read-all env: status_suffix: "-build-and-unit-tests" @@ -32,11 +29,31 @@ concurrency: jobs: build-and-unit-test: + permissions: + statuses: write runs-on: ubuntu-latest timeout-minutes: 30 steps: + - name: Get Job URL + if: ${{ !cancelled() }} + id: get_job + run: | + response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}/jobs") + html_url=$(echo "$response" | jq -r --arg job_name "${{ github.job }}" '.jobs | map(select(.name == $job_name)) | .[0].html_url') + echo "url=${html_url}" >> $GITHUB_OUTPUT + - name: Post Pending Status to Pull Request + if: ${{ !cancelled() }} + run: | + curl -X POST -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ + -H "Accept: application/vnd.github.v3+json" \ + "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/${{github.event.inputs.sha}}" \ + -d '{ + "context": "${{ github.event.inputs.repo }}${{ env.status_suffix }}", + "target_url": "${{ steps.get_job.outputs.url }}", + "state": "pending" + }' - name: Checkout Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: repository: ${{ github.event.inputs.owner }}/${{ github.event.inputs.repo }} ref: ${{ github.event.inputs.branch }} @@ -59,27 +76,9 @@ jobs: else echo "has_changes=true" >> $GITHUB_OUTPUT fi - - name: Get Job URL - if: ${{ !cancelled() }} - id: get_job - run: | - response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}/jobs") - html_url=$(echo "$response" | jq -r --arg job_name "${{ github.job }}" '.jobs | map(select(.name == $job_name)) | .[0].html_url') - 
echo "url=${html_url}" >> $GITHUB_OUTPUT - - name: Post Pending Status to Pull Request - if: ${{ !cancelled() }} - run: | - curl -X POST -H "Authorization: token ${{secrets.GITHUB_TOKEN}}" \ - -H "Accept: application/vnd.github.v3+json" \ - "https://api.github.com/repos/GoogleCloudPlatform/magic-modules/statuses/${{github.event.inputs.sha}}" \ - -d '{ - "context": "${{ github.event.inputs.repo }}${{ env.status_suffix }}", - "target_url": "${{ steps.get_job.outputs.url }}", - "state": "pending" - }' - name: Set up Go if: ${{ !failure() && steps.pull_request.outputs.has_changes == 'true' }} - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: '^1.20' - name: Build Terraform Google Conversion diff --git a/.github/workflows/test-tpg.yml b/.github/workflows/test-tpg.yml index 0b5148f7d1e2..9edca790e1ec 100644 --- a/.github/workflows/test-tpg.yml +++ b/.github/workflows/test-tpg.yml @@ -1,9 +1,6 @@ name: Provider Build and Unit Test -permissions: - actions: read - contents: read - statuses: write +permissions: read-all env: status_suffix: "-build-and-unit-tests" @@ -32,36 +29,12 @@ concurrency: jobs: build-and-unit-test: + permissions: + statuses: write runs-on: ubuntu-latest timeout-minutes: 30 steps: - - name: Checkout Repository - uses: actions/checkout@v3 - with: - repository: ${{ github.event.inputs.owner }}/${{ github.event.inputs.repo }} - ref: ${{ github.event.inputs.branch }} - fetch-depth: 2 - - name: Cache Go modules and build cache - uses: actions/cache@v3 - with: - path: | - ~/go/pkg/mod - ~/.cache/go-build - key: ${{ runner.os }}-test-${{ github.event.inputs.repo }}-${{hashFiles('go.sum','google-*/transport/**','google-*/tpgresource/**','google-*/acctest/**','google-*/envvar/**','google-*/sweeper/**','google-*/verify/**') }} - restore-keys: | - ${{ runner.os }}-test-${{ github.event.inputs.repo }}-${{ hashFiles('go.sum') }} - ${{ runner.os }}-test-${{ github.event.inputs.repo }}- - - name: Check for Code Changes - id: pull_request - 
run: | - gofiles=$(git diff --name-only HEAD~1 | { grep -e "\.go$" -e "go.mod$" -e "go.sum$" || test $? = 1; }) - if [ -z "$gofiles" ]; then - echo "has_changes=false" >> $GITHUB_OUTPUT - else - echo "has_changes=true" >> $GITHUB_OUTPUT - fi - name: Get Job URL - if: ${{ !cancelled() }} id: get_job run: | response=$(curl --get -Ss -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -H "Accept: application/vnd.github.v3+json" "https://api.github.com/repos/${{ github.repository }}/actions/runs/${{ github.run_id }}/attempts/${{ github.run_attempt }}/jobs") @@ -78,9 +51,24 @@ jobs: "target_url": "${{ steps.get_job.outputs.url }}", "state": "pending" }' + - name: Checkout Repository + uses: actions/checkout@v4 + with: + repository: ${{ github.event.inputs.owner }}/${{ github.event.inputs.repo }} + ref: ${{ github.event.inputs.branch }} + fetch-depth: 2 + - name: Check for Code Changes + id: pull_request + run: | + gofiles=$(git diff --name-only HEAD~1 | { grep -e "\.go$" -e "go.mod$" -e "go.sum$" || test $? 
= 1; }) + if [ -z "$gofiles" ]; then + echo "has_changes=false" >> $GITHUB_OUTPUT + else + echo "has_changes=true" >> $GITHUB_OUTPUT + fi - name: Set up Go if: ${{ !failure() && steps.pull_request.outputs.has_changes == 'true' }} - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: '^1.20' - name: Build Provider diff --git a/.github/workflows/unit-test-tgc.yml b/.github/workflows/unit-test-tgc.yml index e1c983d92ba8..f254f91240db 100644 --- a/.github/workflows/unit-test-tgc.yml +++ b/.github/workflows/unit-test-tgc.yml @@ -28,7 +28,7 @@ jobs: rm artifacts-tpgb/output.zip - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: '^1.20' diff --git a/.github/workflows/unit-test-tpg.yml b/.github/workflows/unit-test-tpg.yml index ddb48186ff43..ac925a0fe621 100644 --- a/.github/workflows/unit-test-tpg.yml +++ b/.github/workflows/unit-test-tpg.yml @@ -26,21 +26,10 @@ jobs: rm artifacts/output.zip - name: Set up Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: '^1.20' - - name: Cache Go modules and build cache - uses: actions/cache@v3 - with: - path: | - ~/go/pkg/mod - ~/.cache/go-build - key: ${{ runner.os }}-test-${{ inputs.repo }}-${{hashFiles('go.sum','google-*/transport/**','google-*/tpgresource/**','google-*/acctest/**','google-*/envvar/**','google-*/sweeper/**','google-*/verify/**') }} - restore-keys: | - ${{ runner.os }}-test-${{ inputs.repo }}-${{ hashFiles('go.sum') }} - ${{ runner.os }}-test-${{ inputs.repo }}- - - name: Build Provider run: | go build diff --git a/.github/workflows/unit-tests-diff-processor.yml b/.github/workflows/unit-tests-diff-processor.yml index 638fb42c60c5..91d081716ab3 100644 --- a/.github/workflows/unit-tests-diff-processor.yml +++ b/.github/workflows/unit-tests-diff-processor.yml @@ -11,7 +11,7 @@ jobs: test: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Go uses: actions/setup-go@v4 diff --git 
a/README.md b/README.md index 1035baf6e4a7..5e40471b5628 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ # Magic Modules -Magic Modules is a code generator and CI system that's used to develop the Terraform providers -for Google Cloud Platform, [`google`](https://github.com/hashicorp/terraform-provider-google) (or TPG) and +Magic Modules is a code generator and CI system that's used to develop the Terraform provider +for Google Cloud, [`google`](https://github.com/hashicorp/terraform-provider-google) (or TPG) and [`google-beta`](https://github.com/hashicorp/terraform-provider-google-beta) (or TPGB). Magic Modules allows contributors to make changes against a single codebase and develop both diff --git a/docs/content/best-practices/_index.md b/docs/content/best-practices/_index.md index 81eebff07ebb..0942e110bf34 100644 --- a/docs/content/best-practices/_index.md +++ b/docs/content/best-practices/_index.md @@ -7,7 +7,7 @@ aliases: # Best practices -The following is a list of best practices that contributions are expected to follow in order to ensure a consistent UX for the Google Terraform provider internally and also compared to other Terraform providers. +The following is a list of best practices that contributions are expected to follow in order to ensure a consistent UX for the Terraform provider for Google Cloud internally and also compared to other Terraform providers. ## ForceNew @@ -44,7 +44,7 @@ See [magic-modules#13107](https://github.com/hashicorp/terraform-provider-google ## Add labels and annotations support -The new labels model and the new annotations model are introduced in [Terraform Google Provider 5.0.0](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade#provider). +The new labels model and the new annotations model are introduced in [Terraform provider for Google Cloud 5.0.0](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_5_upgrade#provider). 
There are now three label-related fields with the new labels model: * The `labels` field is now non-authoritative and only manages the label keys defined in your configuration for the resource. @@ -189,4 +189,4 @@ if err := tpgresource.SetDataSourceAnnotations(d); err != nil { 6. In the read mehtod, set `annotations`, and `effective_annotations` to state. 7. In the handwritten acceptance tests, add `annotations` to `ImportStateVerifyIgnore`. 8. In the corresponding data source, after the resource read method, call the function `tpgresource.SetDataSourceAnnotations(d)` to make `annotations` have all of the labels on the resource. -9. Add the documentation for these annotation-related fields. \ No newline at end of file +9. Add the documentation for these annotation-related fields. diff --git a/docs/content/develop/breaking-changes/make-a-breaking-change.md b/docs/content/develop/breaking-changes/make-a-breaking-change.md index d278a429d4dd..b9f28f9f860c 100644 --- a/docs/content/develop/breaking-changes/make-a-breaking-change.md +++ b/docs/content/develop/breaking-changes/make-a-breaking-change.md @@ -18,11 +18,11 @@ The `google` and `google-beta` providers are both considered "stable surfaces" for the purpose of releases, which means that neither provider allows breaking changes except during major releases, which are typically yearly. -Terraform users rely on the stability of Terraform providers (including the GCP -provider and other major providers.) Even as part of a major release, breaking -changes that are overly broad and/or have little benefit to users can cause -deeply negative reactions and significantly delay customers upgrading to the -new major version. +Terraform users rely on the stability of Terraform providers (including the +Google Cloud provider and other major providers.) 
Even as part of a major +release, breaking changes that are overly broad and/or have little benefit to +users can cause deeply negative reactions and significantly delay customers +upgrading to the new major version. Breaking changes may cause significant churn for users by forcing them to update their configurations. It also causes churn in tooling built on top of @@ -177,7 +177,7 @@ Entries should focus on the changes that users need to make when upgrading to `{{% param "majorVersion" %}}`, rather than how to write configurations after upgrading. -See [Terraform Google Provider 4.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_4_upgrade) +See [Terraform provider for Google Cloud 4.0.0 Upgrade Guide](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/version_4_upgrade) and other upgrade guides for examples. The upgrade guide and the actual breaking change will be merged only after both are completed. diff --git a/docs/content/get-started/how-magic-modules-works.md b/docs/content/get-started/how-magic-modules-works.md index 6fc15e1549be..e11d54bccda9 100644 --- a/docs/content/get-started/how-magic-modules-works.md +++ b/docs/content/get-started/how-magic-modules-works.md @@ -8,7 +8,7 @@ aliases: # How Magic Modules works -Magic Modules can be thought of as a source of truth for how to map a GCP API resource representation to a Terraform resource (or datasource) representation. Magic Modules uses that mapping (and additional handwritten code where necessary) to generate "downstream" repositories - in particular, the Terraform providers for Google Platform: [`google`](https://github.com/hashicorp/terraform-provider-google) (or TPG) and [`google-beta`](https://github.com/hashicorp/terraform-provider-google-beta) (or TPGB). +Magic Modules can be thought of as a source of truth for how to map a GCP API resource representation to a Terraform resource (or datasource) representation. 
Magic Modules uses that mapping (and additional handwritten code where necessary) to generate "downstream" repositories - in particular, the Terraform providers for Google Cloud: [`google`](https://github.com/hashicorp/terraform-provider-google) (or TPG) and [`google-beta`](https://github.com/hashicorp/terraform-provider-google-beta) (or TPGB). Generation of the downstream repositories happens for every new commit in a PR (to a temporary branch owned by the [`modular-magician`](https://github.com/modular-magician/) robot user) and on every merge into the main branch (to the main branch of downstreams). Generation for PR commits allows contributors to manually examine the changes, as well as allowing automatic running of unit tests, acceptance tests, and automated checks such as breaking change detection. diff --git a/mmv1/api/async.go b/mmv1/api/async.go new file mode 100644 index 000000000000..55d756fa8312 --- /dev/null +++ b/mmv1/api/async.go @@ -0,0 +1,220 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'api/object' +// require 'api/timeout' + +// Base class from which other Async classes can inherit. +type Async struct { + // Embed YamlValidator object + google.YamlValidator + + // Describes an operation + Operation Operation + + // The list of methods where operations are used. 
+ Actions []string +} + +// def validate +// super + +// check :operation, type: Operation +// check :actions, default: %w[create delete update], type: ::Array, item_type: ::String +// end + +// def allow?(method) +// @actions.include?(method.downcase) +// end + +// Base async operation type +type Operation struct { + google.YamlValidator + + // Contains information about an long-running operation, to make + // requests for the state of an operation. + + Timeouts Timeouts + + Result Result +} + +// def validate +// check :result, type: Result +// check :timeouts, type: Api::Timeouts +// end + +// Base result class +type Result struct { + google.YamlValidator + + // Contains information about the result of an Operation + + ResourceInsideResponse bool +} + +// def validate +// super +// check :resource_inside_response, type: :boolean, default: false +// end + +// Represents an asynchronous operation definition +type OpAsync struct { + // TODO: Should embed Async or not? + // < Async + + Operation OpAsyncOperation + + Result OpAsyncResult + + Status OpAsyncStatus + + Error OpAsyncError + + // If true, include project as an argument to OperationWaitTime. + // It is intended for resources that calculate project/region from a selflink field + IncludeProject bool `yaml:"include_project"` + + // The list of methods where operations are used. + Actions []string +} + +// def initialize(operation, result, status, error) +// super() +// @operation = operation +// @result = result +// @status = status +// @error = error +// end + +// def validate +// super + +// check :operation, type: Operation, required: true +// check :result, type: Result, default: Result.new +// check :status, type: Status +// check :error, type: Error +// check :actions, default: %w[create delete update], type: ::Array, item_type: ::String +// check :include_project, type: :boolean, default: false +// end + +// The main implementation of Operation, +// corresponding to common GCP Operation resources. 
+type OpAsyncOperation struct { + // TODO: Should embed Operation or not? + // < Async::Operation + Kind string + + Path string + + BaseUrl string `yaml:"base_url"` + + WaitMs int `yaml:"wait_ms"` + + Timeouts Timeouts + + // Use this if the resource includes the full operation url. + FullUrl string `yaml:"full_url"` +} + +// def initialize(path, base_url, wait_ms, timeouts) +// super() +// @path = path +// @base_url = base_url +// @wait_ms = wait_ms +// @timeouts = timeouts +// end + +// def validate +// super + +// check :kind, type: String +// check :path, type: String +// check :base_url, type: String +// check :wait_ms, type: Integer + +// check :full_url, type: String + +// conflicts %i[base_url full_url] +// end + +// Represents the results of an Operation request +type OpAsyncResult struct { + Result Result `yaml:",inline"` + + Path string +} + +// def initialize(path = nil, resource_inside_response = nil) +// super() +// @path = path +// @resource_inside_response = resource_inside_response +// end + +// def validate +// super + +// check :path, type: String +// end + +// Provides information to parse the result response to check operation +// status +type OpAsyncStatus struct { + google.YamlValidator + + Path string + + Complete bool + + Allowed []bool +} + +// def initialize(path, complete, allowed) +// super() +// @path = path +// @complete = complete +// @allowed = allowed +// end + +// def validate +// super +// check :path, type: String +// check :allowed, type: Array, item_type: [::String, :boolean] +// end + +// Provides information on how to retrieve errors of the executed operations +type OpAsyncError struct { + google.YamlValidator + + Path string + + Message string +} + +// def initialize(path, message) +// super() +// @path = path +// @message = message +// end + +// def validate +// super +// check :path, type: String +// check :message, type: String +// end diff --git a/mmv1/api/object.go b/mmv1/api/object.go new file mode 100644 index 
000000000000..5b2edb1f67ca --- /dev/null +++ b/mmv1/api/object.go @@ -0,0 +1,82 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'google/extensions' +// require 'google/logger' +// require 'google/yaml_validator' + +// Represents an object that has a (mandatory) name +type NamedObject struct { + google.YamlValidator + + Name string + + // original value of :name before the provider override happens + // same as :name if not overridden in provider + ApiName string `yaml:"api_name"` +} + +// func (n *Named) string_array(arr) { +// types = arr.map(&:class).uniq +// types.size == 1 && types[0] == String +// } + +// func (n *Named) deep_merge(arr1, arr2) { +// // Scopes is an array of standard strings. In which case return the +// // version in the overrides. This allows scopes to be removed rather +// // than allowing for a merge of the two arrays +// if string_array?(arr1) +// return arr2.nil? ? arr1 : arr2 +// end + +// // Merge any elements that exist in both +// result = arr1.map do |el1| +// other = arr2.select { |el2| el1.name == el2.name }.first +// other.nil? ? el1 : el1.merge(other) +// end + +// // Add any elements of arr2 that don't exist in arr1 +// result + arr2.reject do |el2| +// arr1.any? 
{ |el1| el2.name == el1.name } +// end +// } + +// func (n *Named) merge(other) { +// result = self.class.new +// instance_variables.each do |v| +// result.instance_variable_set(v, instance_variable_get(v)) +// end + +// other.instance_variables.each do |v| +// if other.instance_variable_get(v).instance_of?(Array) +// result.instance_variable_set(v, deep_merge(result.instance_variable_get(v), +// other.instance_variable_get(v))) +// else +// result.instance_variable_set(v, other.instance_variable_get(v)) +// end +// end + +// result +// } + +// func (n *Named) validate() { +// super +// check :name, type: String, required: true +// check :api_name, type: String, default: @name +// } diff --git a/mmv1/api/product.go b/mmv1/api/product.go new file mode 100644 index 000000000000..d56edeefb9be --- /dev/null +++ b/mmv1/api/product.go @@ -0,0 +1,239 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" + "golang.org/x/exp/slices" +) + +// require 'api/object' +// require 'api/product/version' +// require 'google/logger' +// require 'compile/core' +// require 'json' + +// Represents a product to be managed +type Product struct { + NamedObject `yaml:",inline"` + + // include Compile::Core + + // Inherited: + // The name of the product's API capitalised in the appropriate places. 
+ // This isn't just the API name because it doesn't meaningfully separate + // words in the api name - "accesscontextmanager" vs "AccessContextManager" + // Example inputs: "Compute", "AccessContextManager" + // Name string + + // Display Name: The full name of the GCP product; eg "Cloud Bigtable" + + Objects []*Resource + + // The list of permission scopes available for the service + // For example: `https://www.googleapis.com/auth/compute` + + Scopes []string + + // The API versions of this product + + Versions []product.Version + + // The base URL for the service API endpoint + // For example: `https://www.googleapis.com/compute/v1/` + + BaseUrl string `yaml:"base_url"` + + // A function reference designed for the rare case where you + // need to use retries in operation calls. Used for the service api + // as it enables itself (self referential) and can result in occasional + // failures on operation_get. see github.com/hashicorp/terraform-provider-google/issues/9489 + + OperationRetry string `yaml:"operation_retry"` + + Async OpAsync + + LegacyName string `yaml:"legacy_name"` + + ClientName string `yaml:"client_name"` +} + +func (p *Product) Validate() { + // TODO Q1 Rewrite super + // super + for _, o := range p.Objects { + o.ProductMetadata = p + } +} + +// def validate +// super +// set_variables @objects, :__product + +// // name comes from Named, and product names must start with a capital +// caps = ('A'..'Z').to_a +// unless caps.include? @name[0] +// raise "product name `//{@name}` must start with a capital letter." 
+// end + +// check :display_name, type: String +// check :objects, type: Array, item_type: Api::Resource +// check :scopes, type: Array, item_type: String, required: true +// check :operation_retry, type: String + +// check :async, type: Api::Async +// check :legacy_name, type: String +// check :client_name, type: String + +// check :versions, type: Array, item_type: Api::Product::Version, required: true +// end + +// // ==================== +// // Custom Getters +// // ==================== + +// // The name of the product's API; "compute", "accesscontextmanager" +// def api_name +// name.downcase +// end + +// // The product full name is the "display name" in string form intended for +// // users to read in documentation; "Google Compute Engine", "Cloud Bigtable" +// def display_name +// if @display_name.nil? +// name.space_separated +// else +// @display_name +// end +// end + +// // Most general version that exists for the product +// // If GA is present, use that, else beta, else alpha +// def lowest_version +// Version::ORDER.each do |ordered_version_name| +// @versions.each do |product_version| +// return product_version if ordered_version_name == product_version.name +// end +// end +// raise "Unable to find lowest version for product //{display_name}" +// end + +// def version_obj(name) +// @versions.each do |v| +// return v if v.name == name +// end + +// raise "API version '//{name}' does not exist for product '//{@name}'" +// end + +// // Get the version of the object specified by the version given if present +// // Or else fall back to the closest version in the chain defined by Version::ORDER +// def version_obj_or_closest(name) +// return version_obj(name) if exists_at_version(name) + +// // versions should fall back to the closest version to them that exists +// name ||= Version::ORDER[0] +// lower_versions = Version::ORDER[0..Version::ORDER.index(name)] + +// lower_versions.reverse_each do |version| +// return version_obj(version) if 
exists_at_version(version) +// end + +// raise "Could not find object for version //{name} and product //{display_name}" +// end + +// def exists_at_version_or_lower(name) +// // Versions aren't normally going to be empty since products need a +// // base_url. This nil check exists for atypical products, like _bundle. +// return true if @versions.nil? + +// name ||= Version::ORDER[0] +// return false unless Version::ORDER.include?(name) + +// (0..Version::ORDER.index(name)).each do |i| +// return true if exists_at_version(Version::ORDER[i]) +// end +// false +// end + +func (p *Product) ExistsAtVersionOrLower(name string) bool { + if !slices.Contains(product.ORDER, name) { + return false + } + + for i := 0; i <= slices.Index(product.ORDER, name); i++ { + if p.ExistsAtVersion(product.ORDER[i]) { + return true + } + } + + return false +} + +func (p *Product) ExistsAtVersion(name string) bool { + for _, v := range p.Versions { + if v.Name == name { + return true + } + } + return false +} + +// def exists_at_version(name) +// // Versions aren't normally going to be empty since products need a +// // base_url. This nil check exists for atypical products, like _bundle. +// return true if @versions.nil? + +// @versions.any? { |v| v.name == name } +// end + +// // Not a conventional setter, so ignore rubocop's warning +// // rubocop:disable Naming/AccessorMethodName +// def set_properties_based_on_version(version) +// @base_url = version.base_url +// end +// // rubocop:enable Naming/AccessorMethodName + +// // ==================== +// // Debugging Methods +// // ==================== + +// def to_s +// // relies on the custom to_json definitions +// JSON.pretty_generate(self) +// end + +// // Prints a dot notation path to where the field is nested within the parent +// // object when called on a property. eg: parent.meta.label.foo +// // Redefined on Product to terminate the calls up the parent chain. 
+// def lineage +// name +// end + +// def to_json(opts = nil) +// json_out = {} + +// instance_variables.each do |v| +// if v == :@objects +// json_out['@resources'] = objects.to_h { |o| [o.name, o] } +// elsif instance_variable_get(v) == false || instance_variable_get(v).nil? +// // ignore false or missing because omitting them cleans up result +// // and both are the effective defaults of their types +// else +// json_out[v] = instance_variable_get(v) +// end +// end + +// JSON.generate(json_out, opts) +// end diff --git a/mmv1/api/product.rb b/mmv1/api/product.rb index cd5f11b560c5..66e56c87d6ab 100644 --- a/mmv1/api/product.rb +++ b/mmv1/api/product.rb @@ -51,10 +51,6 @@ class Product < Api::NamedObject # failures on operation_get. see github.com/hashicorp/terraform-provider-google/issues/9489 attr_reader :operation_retry - # The APIs required to be enabled for this product. - # Usually just the product's API - attr_reader :apis_required - attr_reader :async attr_reader :legacy_name diff --git a/mmv1/api/product/version.go b/mmv1/api/product/version.go new file mode 100644 index 000000000000..3c2ef6a670c0 --- /dev/null +++ b/mmv1/api/product/version.go @@ -0,0 +1,52 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package product + +// require 'api/object' + +var ORDER = []string{"ga", "beta", "alpha", "private"} + +// A version of the API for a given product / API group +// In GCP, different product versions are generally ordered where alpha is +// a superset of beta, and beta a superset of GA. Each version will have a +// different version url. +type Version struct { + // TODO: Should embed NamedObject or not? + // < Api::NamedObject + // include Comparable + + // attr_reader + CaiBaseUrl string `yaml:"cai_base_url"` + + // attr_accessor + BaseUrl string `yaml:"base_url"` + + // attr_accessor + Name string +} + +// def validate +// super +// check :cai_base_url, type: String, required: false +// check :base_url, type: String, required: true +// check :name, type: String, allowed: ORDER, required: true +// end + +// def to_s +// "//{name}: //{base_url}" +// end + +// def <=>(other) +// ORDER.index(name) <=> ORDER.index(other.name) if other.is_a?(Version) +// end diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go new file mode 100644 index 000000000000..894850e765c1 --- /dev/null +++ b/mmv1/api/resource.go @@ -0,0 +1,294 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+package api + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/resource" + "github.com/GoogleCloudPlatform/magic-modules/mmv1/provider/terraform" +) + +type Resource struct { + // Embed NamedObject + NamedObject `yaml:",inline"` + + // [Required] A description of the resource that's surfaced in provider + // documentation. + Description string + + // [Required] (Api::Resource::ReferenceLinks) Reference links provided in + // downstream documentation. + References resource.ReferenceLinks + + // [Required] The GCP "relative URI" of a resource, relative to the product + // base URL. It can often be inferred from the `create` path. + BaseUrl string `yaml:"base_url"` + + // ==================== + // Common Configuration + // ==================== + // + // [Optional] The minimum API version this resource is in. Defaults to ga. + MinVersion string `yaml:"min_version"` + + // [Optional] If set to true, don't generate the resource. + Exclude bool + + // [Optional] If set to true, the resource is not able to be updated. + Immutable bool + + // [Optional] If set to true, this resource uses an update mask to perform + // updates. This is typical of newer GCP APIs. + UpdateMask bool `yaml:"update_mask"` + + // [Optional] If set to true, the object has a `self_link` field. This is + // typical of older GCP APIs. + HasSelfLink bool `yaml:"has_self_link"` + + // [Optional] The validator "relative URI" of a resource, relative to the product + // base URL. Specific to defining the resource as a CAI asset. + CaiBaseUrl string `yaml:"cai_base_url"` + + // ==================== + // URL / HTTP Configuration + // ==================== + // + // [Optional] The "identity" URL of the resource. Defaults to: + // * base_url when the create_verb is :POST + // * self_link when the create_verb is :PUT or :PATCH + SelfLink string `yaml:"self_link"` + + // [Optional] The URL used to creating the resource. 
Defaults to: + // * collection url when the create_verb is :POST + // * self_link when the create_verb is :PUT or :PATCH + CreateUrl string `yaml:"create_url"` + + // [Optional] The URL used to delete the resource. Defaults to the self + // link. + DeleteUrl string `yaml:"delete_url"` + + // [Optional] The URL used to update the resource. Defaults to the self + // link. + UpdateUrl string `yaml:"update_url"` + // [Optional] The HTTP verb used during create. Defaults to :POST. + CreateVerb string `yaml:"create_verb"` + + // [Optional] The HTTP verb used during read. Defaults to :GET. + ReadVerb string `yaml:"read_verb"` + + // [Optional] The HTTP verb used during update. Defaults to :PUT. + UpdateVerb string `yaml:"update_verb"` + + // [Optional] The HTTP verb used during delete. Defaults to :DELETE. + DeleteVerb string `yaml:"delete_verb"` + + // [Optional] Additional Query Parameters to append to GET. Defaults to "" + ReadQueryParams string `yaml:"read_query_params"` + + // ==================== + // Collection / Identity URL Configuration + // ==================== + // + // [Optional] This is the name of the list of items + // within the collection (list) json. Will default to the + // camelcase plural name of the resource. + CollectionUrlKey string `yaml:"collection_url_key"` + + // [Optional] An ordered list of names of parameters that uniquely identify + // the resource. + // Generally, it's safe to leave empty, in which case it defaults to `name`. + // Other values are normally useful in cases where an object has a parent + // and is identified by some non-name value, such as an ip+port pair. + // If you're writing a fine-grained resource (eg with nested_query) a value + // must be set. + Identity []string + + // [Optional] (Api::Resource::NestedQuery) This is useful in case you need + // to change the query made for GET requests only. In particular, this is + // often used to extract an object from a parent object or a collection. 
+ // Note that if both nested_query and custom_code.decoder are provided, + // the decoder will be included within the code handling the nested query. + NestedQuery resource.NestedQuery `yaml:"nested_query"` + + // ==================== + // IAM Configuration + // ==================== + // + // [Optional] (Api::Resource::IamPolicy) Configuration of a resource's + // resource-specific IAM Policy. + IamPolicy resource.IamPolicy `yaml:"iam_policy"` + + // [Optional] If set to true, don't generate the resource itself; only + // generate the IAM policy. + // TODO rewrite: rename? + ExcludeResource bool `yaml:"exclude_resource"` + + // [Optional] GCP kind, e.g. `compute//disk` + Kind string + + // [Optional] If set to true, indicates that a resource is not configurable + // such as GCP regions. + Readonly bool + + // ==================== + // Terraform Overrides + // ==================== + // [Optional] If non-empty, overrides the full filename prefix + // i.e. google/resource_product_{{resource_filename_override}}.go + // i.e. google/resource_product_{{resource_filename_override}}_test.go + FilenameOverride string `yaml:"filename_override"` + + // If non-empty, overrides the full given resource name. + // i.e. 'google_project' for resourcemanager.Project + // Use Provider::Terraform::Config.legacy_name to override just + // product name. + // Note: This should not be used for vanity names for new products. + // This was added to handle preexisting handwritten resources that + // don't match the natural generated name exactly, and to support + // services with a mix of handwritten and generated resources. + LegacyName string `yaml:"legacy_name"` + + // The Terraform resource id format used when calling //setId(...). + // For instance, `{{name}}` means the id will be the resource name. 
+ IdFormat string `yaml:"id_format"` + + // Override attribute used to handwrite the formats for generating regex strings + // that match templated values to a self_link when importing, only necessary when + // a resource is not adequately covered by the standard provider generated options. + // Leading a token with `%` + // i.e. {{%parent}}/resource/{{resource}} + // will allow that token to hold multiple /'s. + ImportFormat []string `yaml:"import_format"` + + CustomCode terraform.CustomCode `yaml:"custom_code"` + + Docs terraform.Docs + + // This block inserts entries into the customdiff.All() block in the + // resource schema -- the code for these custom diff functions must + // be included in the resource constants or come from tpgresource + CustomDiff []string `yaml:"custom_diff"` + + // Lock name for a mutex to prevent concurrent API calls for a given + // resource. + Mutex string + + // Examples in documentation. Backed by generated tests, and have + // corresponding OiCS walkthroughs. + Examples []terraform.Examples + + // Virtual fields on the Terraform resource. Usage and differences from url_param_only + // are documented in provider/terraform/virtual_fields.rb + VirtualFields interface{} `yaml:"virtual_fields"` + + // If true, generates product operation handling logic. + AutogenAsync bool `yaml:"autogen_async"` + + // If true, resource is not importable + ExcludeImport bool `yaml:"exclude_import"` + + // If true, exclude resource from Terraform Validator + // (i.e. terraform-provider-conversion) + ExcludeTgc bool `yaml:"exclude_tgc"` + + // If true, skip sweeper generation for this resource + SkipSweeper bool `yaml:"skip_sweeper"` + + Timeouts Timeouts + + // An array of function names that determine whether an error is retryable. + ErrorRetryPredicates []string `yaml:"error_retry_predicates"` + + // An array of function names that determine whether an error is not retryable. 
+ ErrorAbortPredicates []string `yaml:"error_abort_predicates"` + + // Optional attributes for declaring a resource's current version and generating + // state_upgrader code to the output .go file from files stored at + // mmv1/templates/terraform/state_migrations/ + // used for maintaining state stability with resources first provisioned on older api versions. + SchemaVersion int `yaml:"schema_version"` + + // From this schema version on, state_upgrader code is generated for the resource. + // When unset, state_upgrade_base_schema_version defauts to 0. + // Normally, it is not needed to be set. + StateUpgradeBaseSchemaVersion int `yaml:"state_upgrade_base_schema_version"` + + StateUpgraders bool `yaml:"state_upgraders"` + + // This block inserts the named function and its attribute into the + // resource schema -- the code for the migrate_state function must + // be included in the resource constants or come from tpgresource + // included for backwards compatibility as an older state migration method + // and should not be used for new resources. 
+ MigrateState string `yaml:"migrate_state"` + + // Set to true for resources that are unable to be deleted, such as KMS keyrings or project + // level resources such as firebase project + SkipDelete bool `yaml:"skip_delete"` + + // Set to true for resources that are unable to be read from the API, such as + // public ca external account keys + SkipRead bool `yaml:"skip_read"` + + // Set to true for resources that wish to disable automatic generation of default provider + // value customdiff functions + // TODO rewrite: 1 instance used + SkipDefaultCdiff bool `yaml:"skip_default_cdiff"` + + // This enables resources that get their project via a reference to a different resource + // instead of a project field to use User Project Overrides + SupportsIndirectUserProjectOverride bool `yaml:"supports_indirect_user_project_override"` + + // If true, the resource's project field can be specified as either the short form project + // id or the long form projects/project-id. The extra projects/ string will be removed from + // urls and ids. This should only be used for resources that previously supported long form + // project ids for backwards compatibility. + LegacyLongFormProject bool `yaml:"legacy_long_form_project"` + + // Function to transform a read error so that handleNotFound recognises + // it as a 404. This should be added as a handwritten fn that takes in + // an error and returns one. + ReadErrorTransform string `yaml:"read_error_transform"` + + // If true, resources that failed creation will be marked as tainted. As a consequence + // these resources will be deleted and recreated on the next apply call. This pattern + // is preferred over deleting the resource directly in post_create_failure hooks. + TaintResourceOnFailedCreate bool `yaml:"taint_resource_on_failed_create"` + + // Add a deprecation message for a resource that's been deprecated in the API. 
+ DeprecationMessage string `yaml:"deprecation_message"` + + Properties []*Type + + Parameters []*Type + + ProductMetadata *Product +} + +// TODO: rewrite functions +func (r *Resource) Validate() { + // TODO Q1 Rewrite super + // super + + r.setResourceMetada(r.Parameters) + r.setResourceMetada(r.Properties) +} + +func (r *Resource) setResourceMetada(properties []*Type) { + if properties == nil { + return + } + + for _, property := range properties { + property.ResourceMetadata = r + } +} diff --git a/mmv1/api/resource/iam_policy.go b/mmv1/api/resource/iam_policy.go new file mode 100644 index 000000000000..1c47394ad0fc --- /dev/null +++ b/mmv1/api/resource/iam_policy.go @@ -0,0 +1,168 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resource + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// Information about the IAM policy for this resource +// Several GCP resources have IAM policies that are scoped to +// and accessed via their parent resource +// See: https://cloud.google.com/iam/docs/overview +type IamPolicy struct { + google.YamlValidator + + // boolean of if this binding should be generated + + // attr_reader + Exclude bool + + // boolean of if this binding should be generated + + // attr_reader + ExcludeTgc bool + + // Boolean of if tests for IAM resources should exclude import test steps + // Used to handle situations where typical generated IAM tests cannot import + // due to the parent resource having an API-generated id + + // attr_reader + SkipImportTest bool + + // Character that separates resource identifier from method call in URL + // For example, PubSub subscription uses {resource}:getIamPolicy + // While Compute subnetwork uses {resource}/getIamPolicy + + // attr_reader + MethodNameSeparator string + + // The terraform type of the parent resource if it is not the same as the + // IAM resource. The IAP product needs these as its IAM policies refer + // to compute resources + + // attr_reader + ParentResourceType string + + // Some resources allow retrieving the IAM policy with GET requests, + // others expect POST requests + + // attr_reader + FetchIamPolicyVerb string + + // Last part of URL for fetching IAM policy. + + // attr_reader + FetchIamPolicyMethod string + + // Some resources allow setting the IAM policy with POST requests, + // others expect PUT requests + + // attr_reader + SetIamPolicyVerb string + + // Last part of URL for setting IAM policy. + + // attr_reader + SetIamPolicyMethod string + + // Whether the policy JSON is contained inside of a 'policy' object. 
+ + // attr_reader + WrappedPolicyObj bool + + // Certain resources allow different sets of roles to be set with IAM policies + // This is a role that is acceptable for the given IAM policy resource for use in tests + + // attr_reader + AllowedIamRole string + + // This is a role that grants create/read/delete for the parent resource for use in tests. + // If set, the test runner will receive a binding to this role in _policy tests in order to + // avoid getting locked out of the resource. + + // attr_reader + AdminIamRole string + + // Certain resources need an attribute other than "id" from their parent resource + // Especially when a parent is not the same type as the IAM resource + + // attr_reader + ParentResourceAttribute string + + // If the IAM resource test needs a new project to be created, this is the name of the project + + // attr_reader + TestProjectName string + + // Resource name may need a custom diff suppress function. Default is to use + // CompareSelfLinkOrResourceName + + // attr_reader + CustomDiffSuppress *string + + // Some resources (IAP) use fields named differently from the parent resource. + // We need to use the parent's attributes to create an IAM policy, but they may not be + // named as the IAM IAM resource expects. + // This allows us to specify a file (relative to MM root) containing a partial terraform + // config with the test/example attributes of the IAM resource. + + // attr_reader + ExampleConfigBody string + + // How the API supports IAM conditions + + // attr_reader + IamConditionsRequestType string + + // Allows us to override the base_url of the resource. This is required for Cloud Run as the + // IAM resources use an entirely different base URL from the actual resource + + // attr_reader + BaseUrl string + + // Allows us to override the import format of the resource. Useful for Cloud Run where we need + // variables that are outside of the base_url qualifiers. 
+ + // attr_reader + ImportFormat []string + + // Allows us to override the self_link of the resource. This is required for Artifact Registry + // to prevent breaking changes + + // attr_reader + SelfLink string + + // [Optional] Version number in the request payload. + // if set, it overrides the default IamPolicyVersion + + // attr_reader + IamPolicyVersion string + + // [Optional] Min version to make IAM resources available at + // If unset, defaults to 'ga' + + // attr_reader + MinVersion string + + // [Optional] Check to see if zone value should be replaced with GOOGLE_ZONE in iam tests + // Defaults to true + + // attr_reader + SubstituteZoneValue bool +} + +// func (p *IamPolicy) validate() { + +// } diff --git a/mmv1/api/resource/nested_query.go b/mmv1/api/resource/nested_query.go new file mode 100644 index 000000000000..9cc55e737037 --- /dev/null +++ b/mmv1/api/resource/nested_query.go @@ -0,0 +1,65 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'api/object' +// require 'google/string_utils' + +// Metadata for resources that are nested within a parent resource, as +// a list of resources or single object within the parent. +// e.g. Fine-grained resources +type NestedQuery struct { + google.YamlValidator + + // A list of keys to traverse in order. + // i.e. 
backendBucket --> cdnPolicy.signedUrlKeyNames + // should be ["cdnPolicy", "signedUrlKeyNames"] + + // attr_reader : + Keys []string + + // If true, we expect the the nested list to be + // a list of IDs for the nested resource, rather + // than a list of nested resource objects + // i.e. backendBucket.cdnPolicy.signedUrlKeyNames is a list of key names + // rather than a list of the actual key objects + + // attr_reader : + IsListOfIds bool + + // If true, the resource is created/updated/deleted by patching + // the parent resource and appropriate encoders/update_encoders/pre_delete + // custom code will be included automatically. Only use if parent resource + // does not have a separate endpoint (set as create/delete/update_urls) + // for updating this resource. + // The resulting encoded data will be mapped as + // { + // keys[-1] : list_of_objects + // } + + // attr_reader : + ModifyByPatch bool +} + +// def validate +// super + +// check :keys, type: Array, item_type: String, required: true +// check :is_list_of_ids, type: :boolean, default: false +// check :modify_by_patch, type: :boolean, default: false +// end diff --git a/mmv1/api/resource/reference_links.go b/mmv1/api/resource/reference_links.go new file mode 100644 index 000000000000..6237308ffb4d --- /dev/null +++ b/mmv1/api/resource/reference_links.go @@ -0,0 +1,39 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package resource + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// Represents a list of documentation links. +type ReferenceLinks struct { + google.YamlValidator + + // guides containing + // name: The title of the link + // value: The URL to navigate on click + + //attr_reader + Guides map[string]string + + // the url of the API guide + + //attr_reader + Api string +} + +// func (l *ReferenceLinks) validate() { + +// } diff --git a/mmv1/api/timeouts.go b/mmv1/api/timeouts.go new file mode 100644 index 000000000000..41134697a969 --- /dev/null +++ b/mmv1/api/timeouts.go @@ -0,0 +1,51 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'api/object' + +// Default timeout for all operation types is 20, the Terraform default (https://www.terraform.io/plugin/sdkv2/resources/retries-and-customizable-timeouts) +// minutes. This can be overridden for each resource. 
+const DEFAULT_INSERT_TIMEOUT_MINUTES = 20 +const DEFAULT_UPDATE_TIMEOUT_MINUTES = 20 +const DEFAULT_DELETE_TIMEOUT_MINUTES = 20 + +// Provides timeout information for the different operation types +type Timeouts struct { + google.YamlValidator + + InsertMinutes int `yaml:"insert_minutes"` + + UpdateMinutes int `yaml:"update_minutes"` + + DeleteMinutes int `yaml:"delete_minutes"` +} + +// def initialize +// super + +// validate +// end + +// def validate +// super + +// check :insert_minutes, type: Integer, default: DEFAULT_INSERT_TIMEOUT_MINUTES +// check :update_minutes, type: Integer, default: DEFAULT_UPDATE_TIMEOUT_MINUTES +// check :delete_minutes, type: Integer, default: DEFAULT_DELETE_TIMEOUT_MINUTES +// end diff --git a/mmv1/api/type.go b/mmv1/api/type.go new file mode 100644 index 000000000000..0fe6f165e6dd --- /dev/null +++ b/mmv1/api/type.go @@ -0,0 +1,976 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package api + +// require 'api/object' +// require 'google/string_utils' +// require 'provider/terraform/validation' + +// Represents a property type +type Type struct { + NamedObject `yaml:",inline"` + + // TODO: improve the parsing of properties based on type in resource yaml files. 
+ Type string + + // TODO: set a specific type intead of interface{} + DefaultValue interface{} `yaml:"default_value"` + + Description string + + Exclude bool + + // Add a deprecation message for a field that's been deprecated in the API + // use the YAML chomping folding indicator (>-) if this is a multiline + // string, as providers expect a single-line one w/o a newline. + DeprecationMessage string `yaml:"deprecation_message"` + + // Add a removed message for fields no longer supported in the API. This should + // be used for fields supported in one version but have been removed from + // a different version. + RemovedMessage string `yaml:"removed_message"` + + // If set value will not be sent to server on sync. + // For nested fields, this also needs to be set on each descendant (ie. self, + // child, etc.). + Output bool + + // If set to true, changes in the field's value require recreating the + // resource. + // For nested fields, this only applies at the current level. This means + // it should be explicitly added to each field that needs the ForceNew + // behavior. + Immutable bool + + // url_param_only will not send the field in the resource body and will + // not attempt to read the field from the API response. + // NOTE - this doesn't work for nested fields + UrlParamOnly bool `yaml:"url_param_only"` + + // For nested fields, this only applies within the parent. + // For example, an optional parent can contain a required child. + Required bool + + // Additional query Parameters to append to GET calls. + ReadQueryParams string `yaml:"read_query_params"` + + UpdateVerb string `yaml:"update_verb"` + + UpdateUrl string `yaml:"update_url"` + + // Some updates only allow updating certain fields at once (generally each + // top-level field can be updated one-at-a-time). If this is set, we group + // fields to update by (verb, url, fingerprint, id) instead of just + // (verb, url, fingerprint), to allow multiple fields to reuse the same + // endpoints. 
+ UpdateId string `yaml:"update_id"` + + // The fingerprint value required to update this field. Downstreams should + // GET the resource and parse the fingerprint value while doing each update + // call. This ensures we can supply the fingerprint to each distinct + // request. + FingerprintName string `yaml:"fingerprint_name"` + + // If true, we will include the empty value in requests made including + // this attribute (both creates and updates). This rarely needs to be + // set to true, and corresponds to both the "NullFields" and + // "ForceSendFields" concepts in the autogenerated API clients. + SendEmptyValue bool `yaml:"send_empty_value"` + + // [Optional] If true, empty nested objects are sent to / read from the + // API instead of flattened to null. + // The difference between this and send_empty_value is that send_empty_value + // applies when the key of an object is empty; this applies when the values + // are all nil / default. eg: "expiration: null" vs "expiration: {}" + // In the case of Terraform, this occurs when a block in config has optional + // values, and none of them are used. Terraform returns a nil instead of an + // empty map[string]interface{} like we'd expect. + AllowEmptyObject bool `yaml:"allow_empty_object"` + + MinVersion string `yaml:"min_version"` + + ExactVersion string `yaml:"exact_version"` + + // A list of properties that conflict with this property. Uses the "lineage" + // field to identify the property eg: parent.meta.label.foo + Conflicts []string + + // A list of properties that at least one of must be set. + AtLeastOneOf []string `yaml:"at_least_one_of"` + + // A list of properties that exactly one of must be set. + ExactlyOneOf []string `yaml:"exactly_one_of"` + + // A list of properties that are required to be set together. + RequiredWith []string `yaml:"required_with"` + + // Can only be overridden - we should never set this ourselves. 
+	// TODO: set a specific type instead of interface{}
+	NewType interface{}
+
+	// A pattern that maps expected user input to expected API input.
+	// TODO: remove?
+	Pattern string
+
+	Properties []Type
+
+	EnumValues []string `yaml:"enum_values"`
+
+	ItemType string `yaml:"item_type"`
+
+	// ====================
+	// Terraform Overrides
+	// ====================
+
+	// Adds a DiffSuppressFunc to the schema
+	DiffSuppressFunc string `yaml:"diff_suppress_func"`
+
+	StateFunc string `yaml:"state_func"` // Adds a StateFunc to the schema
+
+	Sensitive bool // Adds `Sensitive: true` to the schema
+
+	// Does not set this value to the returned API value. Useful for fields
+	// like secrets where the returned API value is not helpful.
+	IgnoreRead bool `yaml:"ignore_read"`
+
+	// Adds a ValidateFunc to the schema
+	Validation bool
+
+	// Indicates that this is an Array that should have Set diff semantics.
+	UnorderedList bool `yaml:"unordered_list"`
+
+	IsSet bool `yaml:"is_set"` // Uses a Set instead of an Array
+
+	// Optional function to determine the unique ID of an item in the set
+	// If not specified, schema.HashString (when elements are string) or
+	// schema.HashSchema are used.
+	SetHashFunc string `yaml:"set_hash_func"`
+
+	// if true, then we get the default value from the Google API if no value
+	// is set in the terraform configuration for this field.
+	// It translates to setting the field to Computed & Optional in the schema.
+	// For nested fields, this only applies at the current level. This means
+	// it should be explicitly added to each field that needs the defaulting
+	// behavior.
+	DefaultFromApi bool `yaml:"default_from_api"`
+
+	// https://github.com/hashicorp/terraform/pull/20837
+	// Apply a ConfigMode of SchemaConfigModeAttr to the field.
+	// This should be avoided for new fields, and only used with old ones.
+	SchemaConfigModeAttr bool `yaml:"schema_config_mode_attr"`
+
+	// Names of fields that should be included in the updateMask.
+ UpdateMaskFields []string `yaml:"update_mask_fields"` + + // For a TypeMap, the expander function to call on the key. + // Defaults to expandString. + KeyExpander string `yaml:"key_expander"` + + // For a TypeMap, the DSF to apply to the key. + KeyDiffSuppressFunc string `yaml:"key_diff_suppress_func"` + + // ==================== + // Schema Modifications + // ==================== + // Schema modifications change the schema of a resource in some + // fundamental way. They're not very portable, and will be hard to + // generate so we should limit their use. Generally, if you're not + // converting existing Terraform resources, these shouldn't be used. + // + // With great power comes great responsibility. + + // Flattens a NestedObject by removing that field from the Terraform + // schema but will preserve it in the JSON sent/retrieved from the API + // + // EX: a API schema where fields are nested (eg: `one.two.three`) and we + // desire the properties of the deepest nested object (eg: `three`) to + // become top level properties in the Terraform schema. By overriding + // the properties `one` and `one.two` and setting flatten_object then + // all the properties in `three` will be at the root of the TF schema. + // + // We need this for cases where a field inside a nested object has a + // default, if we can't spend a breaking change to fix a misshapen + // field, or if the UX is _much_ better otherwise. + // + // WARN: only fully flattened properties are currently supported. In the + // example above you could not flatten `one.two` without also flattening + // all of it's parents such as `one` + FlattenObject bool `yaml:"flatten_object"` + + // =========== + // Custom code + // =========== + // All custom code attributes are string-typed. The string should + // be the name of a template file which will be compiled in the + // specified / described place. + + // A custom expander replaces the default expander for an attribute. 
+ // It is called as part of Create, and as part of Update if + // object.input is false. It can return an object of any type, + // so the function header *is* part of the custom code template. + // As with flatten, `property` and `prefix` are available. + CustomExpand string `yaml:"custom_expand"` + + // A custom flattener replaces the default flattener for an attribute. + // It is called as part of Read. It can return an object of any + // type, and may sometimes need to return an object with non-interface{} + // type so that the d.Set() call will succeed, so the function + // header *is* a part of the custom code template. To help with + // creating the function header, `property` and `prefix` are available, + // just as they are in the standard flattener template. + CustomFlatten string `yaml:"custom_flatten"` + + ResourceMetadata *Resource + + ParentMetadata *Type // is nil for top-level properties +} + +const MAX_NAME = 20 + +// func (t *Type) validate() { +// super +// check :description, type: ::String, required: true +// check :exclude, type: :boolean, default: false, required: true +// check :deprecation_message, type: ::String +// check :removed_message, type: ::String +// check :min_version, type: ::String +// check :exact_version, type: ::String +// check :output, type: :boolean +// check :required, type: :boolean +// check :send_empty_value, type: :boolean +// check :allow_empty_object, type: :boolean +// check :url_param_only, type: :boolean +// check :read_query_params, type: ::String +// check :immutable, type: :boolean + +// raise 'Property cannot be output and required at the same time.' 
\ +// if @output && @required + +// check :update_verb, type: Symbol, allowed: %i[POST PUT PATCH NONE], +// default: @__resource&.update_verb + +// check :update_url, type: ::String +// check :update_id, type: ::String +// check :fingerprint_name, type: ::String +// check :pattern, type: ::String + +// check_default_value_property +// check_conflicts +// check_at_least_one_of +// check_exactly_one_of +// check_required_with + +// check :sensitive, type: :boolean, default: false +// check :is_set, type: :boolean, default: false +// check :default_from_api, type: :boolean, default: false +// check :unordered_list, type: :boolean, default: false +// check :schema_config_mode_attr, type: :boolean, default: false + +// // technically set as a default everywhere, but only maps will use this. +// check :key_expander, type: ::String, default: 'tpgresource.ExpandString' +// check :key_diff_suppress_func, type: ::String + +// check :diff_suppress_func, type: ::String +// check :state_func, type: ::String +// check :validation, type: Provider::Terraform::Validation +// check :set_hash_func, type: ::String + +// check :custom_flatten, type: ::String +// check :custom_expand, type: ::String + +// raise "'default_value' and 'default_from_api' cannot be both set" \ +// if @default_from_api && !@default_value.nil? +// } + +// func (t *Type) to_s() { +// JSON.pretty_generate(self) +// } + +// Prints a dot notation path to where the field is nested within the parent +// object. eg: parent.meta.label.foo +// The only intended purpose is to allow better error messages. Some objects +// and at some points in the build this doesn't output a valid output. +// func (t *Type) lineage() { +// return name&.underscore if __parent.nil? + +// "//{__parent.lineage}.//{name&.underscore}" +// } + +// Prints the access path of the field in the configration eg: metadata.0.labels +// The only intended purpose is to get the value of the labes field by calling d.Get(). 
+// func (t *Type) terraform_lineage() { +// return name&.underscore if __parent.nil? || __parent.flatten_object + +// "//{__parent.terraform_lineage}.0.//{name&.underscore}" +// } + +// func (t *Type) to_json(opts) { +// ignore fields that will contain references to parent resources and +// those which will be added later +// ignored_fields = %i[@resource @__parent @__resource @api_name @update_verb +// @__name @name @properties] +// json_out = {} + +// instance_variables.each do |v| +// if v == :@conflicts && instance_variable_get(v).empty? +// // ignore empty conflict arrays +// elsif v == :@at_least_one_of && instance_variable_get(v).empty? +// // ignore empty at_least_one_of arrays +// elsif v == :@exactly_one_of && instance_variable_get(v).empty? +// // ignore empty exactly_one_of arrays +// elsif v == :@required_with && instance_variable_get(v).empty? +// // ignore empty required_with arrays +// elsif instance_variable_get(v) == false || instance_variable_get(v).nil? +// // ignore false booleans as non-existence indicates falsey +// elsif !ignored_fields.include? v +// json_out[v] = instance_variable_get(v) +// end +// end + +// // convert properties to a hash based on name for nested readability +// json_out.merge!(properties&.map { |p| [p.name, p] }.to_h) \ +// if respond_to? 'properties' + +// JSON.generate(json_out, opts) +// } + +// func (t *Type) check_default_value_property() { +// return if @default_value.nil? 
+ +// case self +// when Api::Type::String +// clazz = ::String +// when Api::Type::Integer +// clazz = ::Integer +// when Api::Type::Double +// clazz = ::Float +// when Api::Type::Enum +// clazz = ::Symbol +// when Api::Type::Boolean +// clazz = :boolean +// when Api::Type::ResourceRef +// clazz = [::String, ::Hash] +// else +// raise "Update 'check_default_value_property' method to support " \ +// "default value for type //{self.class}" +// end + +// check :default_value, type: clazz +// } + +// Checks that all conflicting properties actually exist. +// This currently just returns if empty, because we don't want to do the check, since +// this list will have a full path for nested attributes. +// func (t *Type) check_conflicts() { +// check :conflicts, type: ::Array, default: [], item_type: ::String + +// return if @conflicts.empty? +// } + +// Returns list of properties that are in conflict with this property. +// func (t *Type) conflicting() { +// return [] unless @__resource + +// @conflicts +// } + +// Checks that all properties that needs at least one of their fields actually exist. +// This currently just returns if empty, because we don't want to do the check, since +// this list will have a full path for nested attributes. +// func (t *Type) check_at_least_one_of() { +// check :at_least_one_of, type: ::Array, default: [], item_type: ::String + +// return if @at_least_one_of.empty? +// } + +// Returns list of properties that needs at least one of their fields set. +// func (t *Type) at_least_one_of_list() { +// return [] unless @__resource + +// @at_least_one_of +// } + +// Checks that all properties that needs exactly one of their fields actually exist. +// This currently just returns if empty, because we don't want to do the check, since +// this list will have a full path for nested attributes. +// func (t *Type) check_exactly_one_of() { +// check :exactly_one_of, type: ::Array, default: [], item_type: ::String + +// return if @exactly_one_of.empty? 
+// } + +// Returns list of properties that needs exactly one of their fields set. +// func (t *Type) exactly_one_of_list() { +// return [] unless @__resource + +// @exactly_one_of +// } + +// Checks that all properties that needs required with their fields actually exist. +// This currently just returns if empty, because we don't want to do the check, since +// this list will have a full path for nested attributes. +// func (t *Type) check_required_with() { +// check :required_with, type: ::Array, default: [], item_type: ::String + +// return if @required_with.empty? +// } + +// Returns list of properties that needs required with their fields set. +// func (t *Type) required_with_list() { +// // return [] unless @__resource + +// // @required_with +// } + +// func (t *Type) type() { +// // self.class.name.split('::').last +// } + +// func (t *Type) parent() { +// // @__parent +// } + +// func (t *Type) min_version() { +// // if @min_version.nil? +// // @__resource.min_version +// // else +// // @__resource.__product.version_obj(@min_version) +// // end +// } + +// func (t *Type) exact_version() { +// // return nil if @exact_version.nil? || @exact_version.empty? + +// // @__resource.__product.version_obj(@exact_version) +// } + +// func (t *Type) exclude_if_not_in_version!(version) { +// // @exclude ||= exact_version != version unless exact_version.nil? +// // @exclude ||= version < min_version +// } + +// // Overriding is_a? to enable class overrides. +// // Ruby does not let you natively change types, so this is the next best +// // thing. +// func (t *Type) is_a?(clazz) { +// // return Module.const_get(@new_type).new.is_a?(clazz) if @new_type + +// // super(clazz) +// } + +// // Overriding class to enable class overrides. +// // Ruby does not let you natively change types, so this is the next best +// // thing. 
+// func (t *Type) class() { +// // return Module.const_get(@new_type) if @new_type + +// // super +// } + +// // Returns nested properties for this property. +// func (t *Type) nested_properties() { +// // nil +// } + +// func (t *Type) removed() { +// // !(@removed_message.nil? || @removed_message == '') +// } + +// func (t *Type) deprecated() { +// // !(@deprecation_message.nil? || @deprecation_message == '') +// } + +// // private + +// // A constant value to be provided as field +// type Constant struct { +// // < Type +// value + +// func (t *Type) validate +// @description = "This is always //{value}." +// super +// end +// } + +// // Represents a primitive (non-composite) type. +// class Primitive < Type +// end + +// // Represents a boolean +// class Boolean < Primitive +// end + +// // Represents an integer +// class Integer < Primitive +// end + +// // Represents a double +// class Double < Primitive +// end + +// // Represents a string +// class String < Primitive +// func (t *Type) initialize(name = nil) +// super() + +// @name = name +// end + +// PROJECT = Api::Type::String.new('project') +// NAME = Api::Type::String.new('name') +// end + +// // Properties that are fetched externally +// class FetchedExternal < Type + +// func (t *Type) validate +// @conflicts ||= [] +// @at_least_one_of ||= [] +// @exactly_one_of ||= [] +// @required_with ||= [] +// end + +// func (t *Type) api_name +// name +// end +// end + +// class Path < Primitive +// end + +// // Represents a fingerprint. A fingerprint is an output-only +// // field used for optimistic locking during updates. +// // They are fetched from the GCP response. +// class Fingerprint < FetchedExternal +// func (t *Type) validate +// super +// @output = true if @output.nil? 
+// end +// end + +// // Represents a timestamp +// class Time < Primitive +// end + +// // A base class to tag objects that are composed by other objects (arrays, +// // nested objects, etc) +// class Composite < Type +// end + +// // Forwarding declaration to allow defining Array::NESTED_ARRAY_TYPE +// class NestedObject < Composite +// end + +// // Forwarding declaration to allow defining Array::RREF_ARRAY_TYPE +// class ResourceRef < Type +// end + +// // Represents an array, and stores its items' type +// class Array < Composite +// item_type +// min_size +// max_size + +// func (t *Type) validate +// super +// if @item_type.is_a?(NestedObject) || @item_type.is_a?(ResourceRef) +// @item_type.set_variable(@name, :__name) +// @item_type.set_variable(@__resource, :__resource) +// @item_type.set_variable(self, :__parent) +// end +// check :item_type, type: [::String, NestedObject, ResourceRef, Enum], required: true + +// unless @item_type.is_a?(NestedObject) || @item_type.is_a?(ResourceRef) \ +// || @item_type.is_a?(Enum) || type?(@item_type) +// raise "Invalid type //{@item_type}" +// end + +// check :min_size, type: ::Integer +// check :max_size, type: ::Integer +// end + +// func (t *Type) property_class +// case @item_type +// when NestedObject, ResourceRef +// type = @item_type.property_class +// when Enum +// raise 'aaaa' +// else +// type = property_ns_prefix +// type << get_type(@item_type).new(@name).type +// end +// type[-1] = "//{type[-1].camelize(:upper)}Array" +// type +// end + +// func (t *Type) exclude_if_not_in_version!(version) +// super +// @item_type.exclude_if_not_in_version!(version) \ +// if @item_type.is_a? 
NestedObject +// end + +// func (t *Type) nested_properties +// return @item_type.nested_properties.reject(&:exclude) \ +// if @item_type.is_a?(Api::Type::NestedObject) + +// super +// end + +// func (t *Type) item_type_class +// return @item_type \ +// if @item_type.instance_of?(Class) + +// Object.const_get(@item_type) +// end +// end + +// // Represents an enum, and store is valid values +// class Enum < Primitive +// values +// skip_docs_values + +// func (t *Type) validate +// super +// check :values, type: ::Array, item_type: [Symbol, ::String, ::Integer], required: true +// check :skip_docs_values, type: :boolean +// end + +// func (t *Type) merge(other) +// result = self.class.new +// instance_variables.each do |v| +// result.instance_variable_set(v, instance_variable_get(v)) +// end + +// other.instance_variables.each do |v| +// if other.instance_variable_get(v).instance_of?(Array) +// result.instance_variable_set(v, deep_merge(result.instance_variable_get(v), +// other.instance_variable_get(v))) +// else +// result.instance_variable_set(v, other.instance_variable_get(v)) +// end +// end + +// result +// end +// end + +// // Represents a 'selfLink' property, which returns the URI of the resource. +// class SelfLink < FetchedExternal +// EXPORT_KEY = 'selfLink'.freeze + +// resource + +// func (t *Type) name +// EXPORT_KEY +// end + +// func (t *Type) out_name +// EXPORT_KEY.underscore +// end +// end + +// // Represents a reference to another resource +// class ResourceRef < Type +// // The fields which can be overridden in provider.yaml. +// module Fields +// resource +// imports +// end +// include Fields + +// func (t *Type) validate +// super +// @name = @resource if @name.nil? +// @description = "A reference to //{@resource} resource" \ +// if @description.nil? + +// return if @__resource.nil? 
|| @__resource.exclude || @exclude + +// check :resource, type: ::String, required: true +// check :imports, type: ::String, required: TrueClass + +// // TODO: (camthornton) product reference may not exist yet +// return if @__resource.__product.nil? + +// check_resource_ref_property_exists +// end + +// func (t *Type) property +// props = resource_ref.all_user_properties +// .select { |prop| prop.name == @imports } +// return props.first unless props.empty? +// end + +// func (t *Type) resource_ref +// product = @__resource.__product +// resources = product.objects.select { |obj| obj.name == @resource } + +// resources[0] +// end + +// func (t *Type) property_class +// type = property_ns_prefix +// type << [@resource, @imports, 'Ref'] +// type[-1] = type[-1].join('_').camelize(:upper) +// type +// end + +// private + +// func (t *Type) check_resource_ref_property_exists +// return unless defined?(resource_ref.all_user_properties) + +// exported_props = resource_ref.all_user_properties +// exported_props << Api::Type::String.new('selfLink') \ +// if resource_ref.has_self_link +// raise "'//{@imports}' does not exist on '//{@resource}'" \ +// if exported_props.none? { |p| p.name == @imports } +// end +// end + +// // An structured object composed of other objects. +// class NestedObject < Composite + +// func (t *Type) validate +// @description = 'A nested object resource' if @description.nil? +// @name = @__name if @name.nil? +// super + +// raise "Properties missing on //{name}" if @properties.nil? 
+ +// @properties.each do |p| +// p.set_variable(@__resource, :__resource) +// p.set_variable(self, :__parent) +// end +// check :properties, type: ::Array, item_type: Api::Type, required: true +// end + +// func (t *Type) property_class +// type = property_ns_prefix +// type << [@__resource.name, @name] +// type[-1] = type[-1].join('_').camelize(:upper) +// type +// end + +// // Returns all properties including the ones that are excluded +// // This is used for PropertyOverride validation +// func (t *Type) all_properties +// @properties +// end + +// func (t *Type) properties +// raise "Field '//{lineage}' properties are nil!" if @properties.nil? + +// @properties.reject(&:exclude) +// end + +// func (t *Type) nested_properties +// properties +// end + +// // Returns the list of top-level properties once any nested objects with +// // flatten_object set to true have been collapsed +// func (t *Type) root_properties +// properties.flat_map do |p| +// if p.flatten_object +// p.root_properties +// else +// p +// end +// end +// end + +// func (t *Type) exclude_if_not_in_version!(version) +// super +// @properties.each { |p| p.exclude_if_not_in_version!(version) } +// end +// end + +// // An array of string -> string key -> value pairs, such as labels. +// // While this is technically a map, it's split out because it's a much +// // simpler property to generate and means we can avoid conditional logic +// // in Map. +// class KeyValuePairs < Composite +// // Ignore writing the "effective_labels" and "effective_annotations" fields to API. 
+// ignore_write + +// func (t *Type) initialize(name: nil, output: nil, api_name: nil, description: nil, min_version: nil, +// ignore_write: nil, update_verb: nil, update_url: nil, immutable: nil) +// super() + +// @name = name +// @output = output +// @api_name = api_name +// @description = description +// @min_version = min_version +// @ignore_write = ignore_write +// @update_verb = update_verb +// @update_url = update_url +// @immutable = immutable +// end + +// func (t *Type) validate +// super +// check :ignore_write, type: :boolean, default: false + +// return if @__resource.__product.nil? + +// product_name = @__resource.__product.name +// resource_name = @__resource.name + +// if lineage == 'labels' || lineage == 'metadata.labels' || +// lineage == 'configuration.labels' +// if !(is_a? Api::Type::KeyValueLabels) && +// // The label value must be empty string, so skip this resource +// !(product_name == 'CloudIdentity' && resource_name == 'Group') && + +// // The "labels" field has type Array, so skip this resource +// !(product_name == 'DeploymentManager' && resource_name == 'Deployment') && + +// // https://github.com/hashicorp/terraform-provider-google/issues/16219 +// !(product_name == 'Edgenetwork' && resource_name == 'Network') && + +// // https://github.com/hashicorp/terraform-provider-google/issues/16219 +// !(product_name == 'Edgenetwork' && resource_name == 'Subnet') && + +// // "userLabels" is the resource labels field +// !(product_name == 'Monitoring' && resource_name == 'NotificationChannel') && + +// // The "labels" field has type Array, so skip this resource +// !(product_name == 'Monitoring' && resource_name == 'MetricDescriptor') +// raise "Please use type KeyValueLabels for field //{lineage} " \ +// "in resource //{product_name}///{resource_name}" +// end +// elsif is_a? 
Api::Type::KeyValueLabels +// raise "Please don't use type KeyValueLabels for field //{lineage} " \ +// "in resource //{product_name}///{resource_name}" +// end + +// if lineage == 'annotations' || lineage == 'metadata.annotations' +// if !(is_a? Api::Type::KeyValueAnnotations) && +// // The "annotations" field has "ouput: true", so skip this eap resource +// !(product_name == 'Gkeonprem' && resource_name == 'BareMetalAdminClusterEnrollment') +// raise "Please use type KeyValueAnnotations for field //{lineage} " \ +// "in resource //{product_name}///{resource_name}" +// end +// elsif is_a? Api::Type::KeyValueAnnotations +// raise "Please don't use type KeyValueAnnotations for field //{lineage} " \ +// "in resource //{product_name}///{resource_name}" +// end +// end + +// func (t *Type) field_min_version +// @min_version +// end +// end + +// // An array of string -> string key -> value pairs used specifically for the "labels" field. +// // The field name with this type should be "labels" literally. +// class KeyValueLabels < KeyValuePairs +// func (t *Type) validate +// super +// return unless @name != 'labels' + +// raise "The field //{name} has the type KeyValueLabels, but the field name is not 'labels'!" +// end +// end + +// // An array of string -> string key -> value pairs used for the "terraform_labels" field. +// class KeyValueTerraformLabels < KeyValuePairs +// end + +// // An array of string -> string key -> value pairs used for the "effective_labels" +// // and "effective_annotations" fields. +// class KeyValueEffectiveLabels < KeyValuePairs +// end + +// // An array of string -> string key -> value pairs used specifically for the "annotations" field. +// // The field name with this type should be "annotations" literally. 
+// class KeyValueAnnotations < KeyValuePairs +// func (t *Type) validate +// super +// return unless @name != 'annotations' + +// raise "The field //{name} has the type KeyValueAnnotations,\ +// but the field name is not 'annotations'!" +// end +// end + +// // Map from string keys -> nested object entries +// class Map < Composite +// // .yaml. +// module Fields +// // The type definition of the contents of the map. +// value_type + +// // While the API doesn't give keys an explicit name, we specify one +// // because in Terraform the key has to be a property of the object. +// // +// // The name of the key. Used in the Terraform schema as a field name. +// key_name + +// // A description of the key's format. Used in Terraform to describe +// // the field in documentation. +// key_description +// end +// include Fields + +// func (t *Type) validate +// super +// check :key_name, type: ::String, required: true +// check :key_description, type: ::String + +// @value_type.set_variable(@name, :__name) +// @value_type.set_variable(@__resource, :__resource) +// @value_type.set_variable(self, :__parent) +// check :value_type, type: Api::Type::NestedObject, required: true +// raise "Invalid type //{@value_type}" unless type?(@value_type) +// end + +// func (t *Type) nested_properties +// @value_type.nested_properties.reject(&:exclude) +// end +// end + +// // Support for schema ValidateFunc functionality. +// class Validation < Object +// // Ensures the value matches this regex +// regex +// function + +// func (t *Type) validate +// super + +// check :regex, type: String +// check :function, type: String +// end +// end + +// func (t *Type) type?(type) +// type.is_a?(Type) || !get_type(type).nil? 
+// end + +// func (t *Type) get_type(type) +// Module.const_get(type) +// end + +// func (t *Type) property_ns_prefix +// [ +// 'Google', +// @__resource.__product.name.camelize(:upper), +// 'Property' +// ] +// end +// end diff --git a/mmv1/compiler.rb b/mmv1/compiler.rb index 4767f53c5f6d..2e4bc8b5b3ce 100755 --- a/mmv1/compiler.rb +++ b/mmv1/compiler.rb @@ -177,7 +177,8 @@ resources = [] Dir["#{product_name}/*"].each do |file_path| next if File.basename(file_path) == 'product.yaml' \ - || File.extname(file_path) != '.yaml' + || File.extname(file_path) != '.yaml' \ + || File.basename(file_path).include?('go_') if override_dir # Skip if resource will be merged in the override loop @@ -197,7 +198,8 @@ ovr_prod_dir = File.join(override_dir, product_name) Dir["#{ovr_prod_dir}/*"].each do |override_path| next if File.basename(override_path) == 'product.yaml' \ - || File.extname(override_path) != '.yaml' + || File.extname(override_path) != '.yaml' \ + || File.basename(override_path).include?('go_') file_path = File.join(product_name, File.basename(override_path)) res_yaml = if File.exist?(file_path) diff --git a/mmv1/go.mod b/mmv1/go.mod new file mode 100644 index 000000000000..24d9a6c630d1 --- /dev/null +++ b/mmv1/go.mod @@ -0,0 +1,8 @@ +module github.com/GoogleCloudPlatform/magic-modules/mmv1 + +go 1.20 + +require ( + golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 + gopkg.in/yaml.v2 v2.4.0 +) diff --git a/mmv1/go.sum b/mmv1/go.sum new file mode 100644 index 000000000000..b8ec4cc09d5a --- /dev/null +++ b/mmv1/go.sum @@ -0,0 +1,6 @@ +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= +golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 
v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/mmv1/google/yaml_validator.go b/mmv1/google/yaml_validator.go new file mode 100644 index 000000000000..282b43b5f41e --- /dev/null +++ b/mmv1/google/yaml_validator.go @@ -0,0 +1,155 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package google + +import ( + "log" + + "gopkg.in/yaml.v2" +) + +// A helper class to validate contents coming from YAML files. +type YamlValidator struct{} + +func (v *YamlValidator) Parse(content []byte, obj interface{}) { + // TODO(nelsonjr): Allow specifying which symbols to restrict it further. + // But it requires inspecting all configuration files for symbol sources, + // such as Enum values. Leaving it as a nice-to-have for the future. 
+ if err := yaml.Unmarshal(content, obj); err != nil { + log.Fatalf("Cannot unmarshal data: %v", err) + } +} + +// func (v *YamlValidator) allowed_classes() { +// ObjectSpace.each_object(Class).select do |klass| +// klass < Google::YamlValidator +// end.push(Time, Symbol) +// } + +// func (v *YamlValidator) validate() { +// Google::LOGGER.debug "Validating //{self.class} '//{@name}'" +// check_extraneous_properties +// } + +// func (v *YamlValidator) set_variable(value, property) { +// Google::LOGGER.debug "Setting variable of //{value} to //{self}" +// instance_variable_set("@//{property}", value) +// } + +// Does all validation checking for a particular variable. +// options: +// :default - the default value for this variable if its nil +// :type - the allowed types (single or array) that this value can be +// :item_type - the allowed types that all values in this array should be +// (implied that type == array) +// :allowed - the allowed values that this non-array variable should be. +// :required - is the variable required? (defaults: false) +// func (v *YamlValidator) check(variable, **opts) { +// value = instance_variable_get("@//{variable}") + +// // Set default value. +// if !opts[:default].nil? && value.nil? +// instance_variable_set("@//{variable}", opts[:default]) +// value = instance_variable_get("@//{variable}") +// end + +// // Check if value is required. Print nested path if available. +// lineage_path = respond_to?('lineage') ? lineage : '' +// raise "//{lineage_path} > Missing '//{variable}'" if value.nil? && opts[:required] +// return if value.nil? 
+ +// // Check type +// check_property_value(variable, value, opts[:type]) if opts[:type] + +// // Check item_type +// if value.is_a?(Array) +// raise "//{lineage_path} > //{variable} must have item_type on arrays" unless opts[:item_type] + +// value.each_with_index do |o, index| +// check_property_value("//{variable}[//{index}]", o, opts[:item_type]) +// end +// end + +// // Check if value is allowed +// return unless opts[:allowed] +// raise "//{value} on //{variable} should be one of //{opts[:allowed]}" \ +// unless opts[:allowed].include?(value) +// } + +// func (v *YamlValidator) conflicts(list) { +// value_checked = false +// list.each do |item| +// next if instance_variable_get("@//{item}").nil? +// raise "//{list.join(',')} cannot be set at the same time" if value_checked + +// value_checked = true +// end +// } + +// private + +// func (v *YamlValidator) check_type(name, object, type) { +// if type == :boolean +// return unless [TrueClass, FalseClass].find_index(object.class).nil? +// elsif type.is_a? ::Array +// return if type.find_index(:boolean) && [TrueClass, FalseClass].find_index(object.class) +// return unless type.find_index(object.class).nil? +// // check if class is or inherits from type +// elsif object.class <= type +// return +// end +// raise "Property '//{name}' is '//{object.class}' instead of '//{type}'" +// } + +// func (v *YamlValidator) log_check_type(object) { +// if object.respond_to?(:name) +// Google::LOGGER.debug "Checking object //{object.name}" +// else +// Google::LOGGER.debug "Checking object //{object}" +// end +// } + +// func (v *YamlValidator) check_property_value(property, prop_value, type) { +// Google::LOGGER.debug "Checking '//{property}' on //{object_display_name}" +// check_type property, prop_value, type unless type.nil? 
+// prop_value.validate if prop_value.is_a?(Api::Object) +// } + +// func (v *YamlValidator) check_extraneous_properties() { +// instance_variables.each do |variable| +// var_name = variable.id2name[1..] +// next if var_name.start_with?('__') + +// Google::LOGGER.debug "Validating '//{var_name}' on //{object_display_name}" +// raise "Extraneous variable '//{var_name}' in //{object_display_name}" \ +// unless methods.include?(var_name.intern) +// end +// } + +// func (v *YamlValidator) set_variables(objects, property) { +// return if objects.nil? + +// objects.each do |object| +// object.set_variable(self, property) if object.respond_to?(:set_variable) +// end +// } + +// func (v *YamlValidator) ensure_property_does_not_exist(property) { +// raise "Conflict of property '//{property}' for object '//{self}'" \ +// unless instance_variable_get("@//{property}").nil? +// } + +// func (v *YamlValidator) object_display_name() { +// "//{@name}" +// } diff --git a/mmv1/main.go b/mmv1/main.go new file mode 100644 index 000000000000..b01aad864eaf --- /dev/null +++ b/mmv1/main.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "log" + "os" + "path" + "path/filepath" + "sort" + "strings" + + "golang.org/x/exp/slices" + + "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" + "github.com/GoogleCloudPlatform/magic-modules/mmv1/provider" +) + +func main() { + // TODO Q2: parse flags + var version = "beta" + var outputPath = "." 
+ var generateCode = true + var generateDocs = true + + log.Printf("Initiating go MM compiler") + + // TODO Q1: allow specifying one product (flag or hardcoded) + // var productsToGenerate []string + // var allProducts = true + var productsToGenerate = []string{"products/datafusion"} + var allProducts = false + + var allProductFiles []string = make([]string, 0) + + files, err := filepath.Glob("products/**/product.yaml") + if err != nil { + return + } + for _, filePath := range files { + dir := filepath.Dir(filePath) + allProductFiles = append(allProductFiles, fmt.Sprintf("products/%s", filepath.Base(dir))) + } + // TODO Q2: override directory + + if allProducts { + productsToGenerate = allProductFiles + } + + if productsToGenerate == nil || len(productsToGenerate) == 0 { + log.Fatalf("No product.yaml file found.") + } + + log.Printf("Generating MM output to '%s'", outputPath) + log.Printf("Using %s version", version) + + // Building compute takes a long time and can't be parallelized within the product + // so lets build it first + sort.Slice(allProductFiles, func(i int, j int) bool { + if allProductFiles[i] == "compute" { + return true + } + return false + }) + + yamlValidator := google.YamlValidator{} + + for _, productName := range allProductFiles { + productYamlPath := path.Join(productName, "go_product.yaml") + + // TODO Q2: uncomment the error check that if the product.yaml exists for each product + // after Go-converted product.yaml files are complete for all products + // if _, err := os.Stat(productYamlPath); errors.Is(err, os.ErrNotExist) { + // log.Fatalf("%s does not contain a product.yaml file", productName) + // } + + // TODO Q2: product overrides + + if _, err := os.Stat(productYamlPath); err == nil { + // TODO Q1: remove these lines, which are for debugging + // log.Printf("productYamlPath %#v", productYamlPath) + + var resources []*api.Resource = make([]*api.Resource, 0) + + productYaml, err := os.ReadFile(productYamlPath) + if err != nil { + 
log.Fatalf("Cannot open the file: %v", productYaml) + } + productApi := &api.Product{} + yamlValidator.Parse(productYaml, productApi) + + // TODO Q1: remove these lines, which are for debugging + // prod, _ := json.Marshal(productApi) + // log.Printf("prod %s", string(prod)) + + if !productApi.ExistsAtVersionOrLower(version) { + log.Printf("%s does not have a '%s' version, skipping", productName, version) + continue + } + + resourceFiles, err := filepath.Glob(fmt.Sprintf("%s/*", productName)) + if err != nil { + log.Fatalf("Cannot get resources files: %v", err) + } + for _, resourceYamlPath := range resourceFiles { + if filepath.Base(resourceYamlPath) == "product.yaml" || filepath.Ext(resourceYamlPath) != ".yaml" { + continue + } + + // Prepend "go_" to the Go yaml files' name to distinguish with the ruby yaml files + if filepath.Base(resourceYamlPath) == "go_product.yaml" || !strings.HasPrefix(filepath.Base(resourceYamlPath), "go_") { + continue + } + + // TODO Q1: remove these lines, which are for debugging + // log.Printf(" resourceYamlPath %s", resourceYamlPath) + resourceYaml, err := os.ReadFile(resourceYamlPath) + if err != nil { + log.Fatalf("Cannot open the file: %v", resourceYamlPath) + } + resource := &api.Resource{} + yamlValidator.Parse(resourceYaml, resource) + + // TODO Q1: remove these lines, which are for debugging + // res, _ := json.Marshal(resource) + // log.Printf("resource %s", string(res)) + + // TODO Q1: add labels related fields + + resource.Validate() + resources = append(resources, resource) + } + + // TODO Q2: override resources + log.Printf("resources before sorting %#v", resources) + + // Sort resources by name + sort.Slice(resources, func(i, j int) bool { + return resources[i].Name < resources[j].Name + }) + + productApi.Objects = resources + productApi.Validate() + + // TODO Q2: set other providers via flag + providerToGenerate := provider.NewTerraform(productApi) + + if !slices.Contains(productsToGenerate, productName) { + 
log.Printf("%s not specified, skipping generation", productName) + continue + } + + log.Printf("%s: Generating files", productName) + providerToGenerate.Generate(outputPath, productName, generateCode, generateDocs) + } + + // TODO Q2: copy common files + } +} diff --git a/mmv1/openapi_generate/parser.rb b/mmv1/openapi_generate/parser.rb index 57f013b7ec93..fe5d9a3008a8 100644 --- a/mmv1/openapi_generate/parser.rb +++ b/mmv1/openapi_generate/parser.rb @@ -33,7 +33,9 @@ def run def write_object(name, obj, type, url_param) field = nil case name - when 'projectsId' + when 'projectsId', 'project' + # projectsId and project are omitted in MMv1 as they are inferred from + # the presence of {{project}} in the URL return field when 'locationsId' name = 'location' @@ -126,6 +128,12 @@ def write_object(name, obj, type, url_param) field.instance_variable_set(:@output, obj.read_only) end + # x-google-identifier fields are described by AIP 203 and are represented + # as output only in Terraform. + if obj.instance_variable_get(:@raw_schema)['x-google-identifier'] + field.instance_variable_set(:@output, true) + end + if (obj.respond_to?(:write_only) && obj.write_only) \ || obj.instance_variable_get(:@raw_schema)['x-google-immutable'] field.instance_variable_set(:@immutable, true) @@ -155,8 +163,8 @@ def parse_openapi(spec_path, resource_path, resource_name) parameters = [] path.post.parameters.each do |param| parameter_object = write_object(param.name, param, param.schema.type, true) - # Ignore standard requestId field - next if param.name == 'requestId' + # Ignore standard requestId and validateOnly params + next if param.name == 'requestId' || param.name == 'validateOnly' next if parameter_object.nil? # All parameters are immutable diff --git a/mmv1/products/apphub/Application.yaml b/mmv1/products/apphub/Application.yaml new file mode 100644 index 000000000000..fbac56d6ba63 --- /dev/null +++ b/mmv1/products/apphub/Application.yaml @@ -0,0 +1,189 @@ +# Copyright 2024 Google Inc. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- !ruby/object:Api::Resource +base_url: projects/{{project}}/locations/{{location}}/applications +create_url: projects/{{project}}/locations/{{location}}/applications?applicationId={{application_id}} +self_link: projects/{{project}}/locations/{{location}}/applications/{{application_id}} +id_format: projects/{{project}}/locations/{{location}}/applications/{{application_id}} +import_format: + - projects/{{project}}/locations/{{location}}/applications/{{application_id}} +name: Application +description: 'Application is a functional grouping of Services and Workloads that helps achieve a desired end-to-end business functionality. + Services and Workloads are owned by the Application.' 
+autogen_async: true +examples: + - !ruby/object:Provider::Terraform::Examples + name: "application_basic" + primary_resource_id: "example" + config_path: "templates/terraform/examples/apphub_application_basic.tf.erb" + vars: + application_id: "example-application" + - !ruby/object:Provider::Terraform::Examples + name: "application_full" + primary_resource_id: "example2" + config_path: "templates/terraform/examples/apphub_application_full.tf.erb" + vars: + application_id: "example-application" + display_name: "Application Full" + description: "Application for testing" + business_name: "Alice" + business_email: "alice@google.com" + developer_name: "Bob" + developer_email: "bob@google.com" + operator_name: "Charlie" + operator_email: "charlie@google.com" +properties: + - !ruby/object:Api::Type::String + name: name + output: true + description: "Identifier. The resource name of an Application. Format:\n\"projects/{host-project-id}/locations/{location}/applications/{application-id}\" " + - !ruby/object:Api::Type::String + name: displayName + description: 'Optional. User-defined name for the Application. ' + - !ruby/object:Api::Type::String + name: description + description: 'Optional. User-defined description of an Application. ' + - !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Enum + name: type + description: 'Criticality type. ' + required: true + values: + - :MISSION_CRITICAL + - :HIGH + - :MEDIUM + - :LOW + name: criticality + description: 'Criticality of the Application, Service, or Workload ' + - !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Enum + name: type + description: 'Environment type. 
' + required: true + values: + - :PRODUCTION + - :STAGING + - :TEST + - :DEVELOPMENT + name: environment + description: 'Environment of the Application, Service, or Workload ' + - !ruby/object:Api::Type::Array + name: developerOwners + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: displayName + description: 'Optional. Contact''s name. ' + - !ruby/object:Api::Type::String + name: email + required: true + description: 'Required. Email address of the contacts. ' + description: 'Optional. Developer team that owns development and coding. ' + - !ruby/object:Api::Type::Array + name: operatorOwners + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: displayName + description: 'Optional. Contact''s name. ' + - !ruby/object:Api::Type::String + name: email + required: true + description: 'Required. Email address of the contacts. ' + description: 'Optional. Operator team that ensures runtime and operations. ' + - !ruby/object:Api::Type::Array + name: businessOwners + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: displayName + description: 'Optional. Contact''s name. ' + - !ruby/object:Api::Type::String + name: email + required: true + description: 'Required. Email address of the contacts. ' + description: 'Optional. Business team that ensures user needs are met and value + is delivered ' + name: attributes + description: 'Consumer provided attributes. ' + - !ruby/object:Api::Type::String + name: createTime + description: 'Output only. Create time. ' + output: true + - !ruby/object:Api::Type::String + name: updateTime + description: 'Output only. Update time. ' + output: true + - !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Enum + name: type + description: "Required. Scope Type. 
\n Possible values:\nREGIONAL" + required: true + values: + - :REGIONAL + name: scope + description: 'Scope of an application. ' + required: true + - !ruby/object:Api::Type::String + name: uid + description: 'Output only. A universally unique identifier (in UUID4 format) for + the `Application`. ' + output: true + - !ruby/object:Api::Type::Enum + name: state + description: "Output only. Application state. \n Possible values:\n STATE_UNSPECIFIED\nCREATING\nACTIVE\nDELETING" + output: true + values: + - :STATE_UNSPECIFIED + - :CREATING + - :ACTIVE + - :DELETING +parameters: + - !ruby/object:Api::Type::String + name: location + description: 'Part of `parent`. See documentation of `projectsId`. ' + url_param_only: true + required: true + immutable: true + - !ruby/object:Api::Type::String + name: applicationId + description: 'Required. The Application identifier. ' + url_param_only: true + required: true + immutable: true +async: !ruby/object:Api::OpAsync + operation: !ruby/object:Api::OpAsync::Operation + path: name + base_url: "{{op_id}}" + wait_ms: 1000 + timeouts: + result: !ruby/object:Api::OpAsync::Result + path: response + resource_inside_response: true + status: !ruby/object:Api::OpAsync::Status + path: done + complete: true + allowed: + - true + - false + error: !ruby/object:Api::OpAsync::Error + path: error + message: message +update_verb: :PATCH +update_mask: true diff --git a/mmv1/products/apphub/product.yaml b/mmv1/products/apphub/product.yaml new file mode 100644 index 000000000000..4df439355736 --- /dev/null +++ b/mmv1/products/apphub/product.yaml @@ -0,0 +1,23 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +--- !ruby/object:Api::Product +name: Apphub +display_name: App Hub +versions: + - !ruby/object:Api::Product::Version + name: ga + base_url: https://apphub.googleapis.com/v1/ +scopes: + - https://www.googleapis.com/auth/cloud-platform diff --git a/mmv1/products/artifactregistry/Repository.yaml b/mmv1/products/artifactregistry/Repository.yaml index 6e0c73eb468f..965a3fab1923 100644 --- a/mmv1/products/artifactregistry/Repository.yaml +++ b/mmv1/products/artifactregistry/Repository.yaml @@ -181,6 +181,7 @@ properties: name: 'dockerConfig' description: |- Docker repository config contains repository level configuration for the repositories of docker type. 
+ allow_empty_object: true properties: - !ruby/object:Api::Type::Boolean name: 'immutableTags' diff --git a/mmv1/products/bigquery/Routine.yaml b/mmv1/products/bigquery/Routine.yaml index 513a80b503d4..3dbbfd3b0577 100644 --- a/mmv1/products/bigquery/Routine.yaml +++ b/mmv1/products/bigquery/Routine.yaml @@ -26,7 +26,7 @@ import_format: ['projects/{{project}}/datasets/{{dataset_id}}/routines/{{routine_id}}'] examples: - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_basic' + name: 'bigquery_routine_basic' primary_resource_id: 'sproc' primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\", context[\"random_suffix\"\ @@ -35,7 +35,7 @@ examples: dataset_id: 'dataset_id' routine_id: 'routine_id' - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_json' + name: 'bigquery_routine_json' primary_resource_id: 'sproc' primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\", context[\"random_suffix\"\ @@ -44,7 +44,7 @@ examples: dataset_id: 'dataset_id' routine_id: 'routine_id' - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_tvf' + name: 'bigquery_routine_tvf' primary_resource_id: 'sproc' primary_resource_name: "fmt.Sprintf(\"tf_test_dataset_id%s\", context[\"random_suffix\"\ @@ -53,26 +53,34 @@ examples: dataset_id: 'dataset_id' routine_id: 'routine_id' - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_pyspark' + name: 'bigquery_routine_pyspark' primary_resource_id: 'pyspark' vars: dataset_id: 'dataset_id' connection_id: 'connection_id' routine_id: 'routine_id' - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_pyspark_mainfile' + name: 'bigquery_routine_pyspark_mainfile' primary_resource_id: 'pyspark_mainfile' vars: dataset_id: 'dataset_id' connection_id: 'connection_id' routine_id: 'routine_id' - !ruby/object:Provider::Terraform::Examples - name: 'big_query_routine_spark_jar' + name: 'bigquery_routine_spark_jar' primary_resource_id: 'spark_jar' vars: 
dataset_id: 'dataset_id' connection_id: 'connection_id' routine_id: 'routine_id' + - !ruby/object:Provider::Terraform::Examples + skip_test: true + name: 'bigquery_routine_remote_function' + primary_resource_id: 'remote_function' + vars: + dataset_id: 'dataset_id' + connection_id: 'connection_id' + routine_id: 'routine_id' properties: - !ruby/object:Api::Type::NestedObject name: routineReference @@ -283,3 +291,33 @@ properties: description: | The fully qualified name of a class in jarUris, for example, com.example.wordcount. Exactly one of mainClass and main_jar_uri field should be set for Java/Scala language type. + - !ruby/object:Api::Type::NestedObject + name: 'remoteFunctionOptions' + description: Remote function specific options. + properties: + - !ruby/object:Api::Type::String + name: 'endpoint' + description: | + Endpoint of the user-provided remote service, e.g. + `https://us-east1-my_gcf_project.cloudfunctions.net/remote_add` + - !ruby/object:Api::Type::String + name: 'connection' + description: | + Fully qualified name of the user-provided connection object which holds + the authentication information to send requests to the remote service. + Format: "projects/{projectId}/locations/{locationId}/connections/{connectionId}" + - !ruby/object:Api::Type::KeyValuePairs + name: 'userDefinedContext' + description: | + User-defined context as a set of key/value pairs, which will be sent as function + invocation context together with batched arguments in the requests to the remote + service. The total number of bytes of keys and values must be less than 8KB. + + An object containing a list of "key": value pairs. Example: + `{ "name": "wrench", "mass": "1.3kg", "count": "3" }`. + default_from_api: true + - !ruby/object:Api::Type::String + name: 'maxBatchingRows' + description: | + Max number of rows in each batch sent to the remote service. If absent or if 0, + BigQuery dynamically decides the number of rows in a batch. 
diff --git a/mmv1/products/certificatemanager/Certificate.yaml b/mmv1/products/certificatemanager/Certificate.yaml index d6cc703156cb..119379747fd0 100644 --- a/mmv1/products/certificatemanager/Certificate.yaml +++ b/mmv1/products/certificatemanager/Certificate.yaml @@ -88,6 +88,13 @@ examples: dns_auth_name2: 'dns-auth2' dns_auth_subdomain2: 'subdomain2' cert_name: 'dns-cert' + - !ruby/object:Provider::Terraform::Examples + name: 'certificate_manager_google_managed_regional_certificate_dns_auth' + primary_resource_id: 'default' + vars: + dns_auth_name: 'dns-auth' + dns_auth_subdomain: 'subdomain' + cert_name: 'dns-cert' custom_code: !ruby/object:Provider::Terraform::CustomCode constants: templates/terraform/constants/cert_manager.erb parameters: diff --git a/mmv1/products/certificatemanager/DnsAuthorization.yaml b/mmv1/products/certificatemanager/DnsAuthorization.yaml index bcb50c60c329..92e1acdd3494 100644 --- a/mmv1/products/certificatemanager/DnsAuthorization.yaml +++ b/mmv1/products/certificatemanager/DnsAuthorization.yaml @@ -13,9 +13,9 @@ --- !ruby/object:Api::Resource name: 'DnsAuthorization' -base_url: 'projects/{{project}}/locations/global/dnsAuthorizations' -create_url: 'projects/{{project}}/locations/global/dnsAuthorizations?dnsAuthorizationId={{name}}' -self_link: 'projects/{{project}}/locations/global/dnsAuthorizations/{{name}}' +base_url: 'projects/{{project}}/locations/{{location}}/dnsAuthorizations' +create_url: 'projects/{{project}}/locations/{{location}}/dnsAuthorizations?dnsAuthorizationId={{name}}' +self_link: 'projects/{{project}}/locations/{{location}}/dnsAuthorizations/{{name}}' update_verb: :PATCH update_mask: true description: | @@ -39,7 +39,9 @@ async: !ruby/object:Api::OpAsync docs: !ruby/object:Provider::Terraform::Docs autogen_async: true import_format: - ['projects/{{project}}/locations/global/dnsAuthorizations/{{name}}'] + ['projects/{{project}}/locations/{{location}}/dnsAuthorizations/{{name}}'] +schema_version: 1 +state_upgraders: 
true examples: - !ruby/object:Provider::Terraform::Examples name: 'certificate_manager_dns_authorization_basic' @@ -48,6 +50,13 @@ examples: dns_auth_name: 'dns-auth' zone_name: 'my-zone' subdomain: 'subdomain' + - !ruby/object:Provider::Terraform::Examples + name: 'certificate_manager_dns_authorization_regional' + primary_resource_id: 'default' + vars: + dns_auth_name: 'dns-auth' + zone_name: 'my-zone' + subdomain: 'subdomain' parameters: - !ruby/object:Api::Type::String name: 'name' @@ -58,6 +67,13 @@ parameters: Name of the resource; provided by the client when the resource is created. The name must be 1-64 characters long, and match the regular expression [a-zA-Z][a-zA-Z0-9_-]* which means the first character must be a letter, and all following characters must be a dash, underscore, letter or digit. + - !ruby/object:Api::Type::String + name: 'location' + description: | + The Certificate Manager location. If not specified, "global" is used. + default_value: global + immutable: true + url_param_only: true properties: - !ruby/object:Api::Type::String name: 'description' @@ -75,6 +91,22 @@ properties: A domain which is being authorized. A DnsAuthorization resource covers a single domain and its wildcard, e.g. authorization for "example.com" can be used to issue certificates for "example.com" and "*.example.com". + - !ruby/object:Api::Type::Enum + name: type + description: | + type of DNS authorization. If unset during the resource creation, FIXED_RECORD will + be used for global resources, and PER_PROJECT_RECORD will be used for other locations. + + FIXED_RECORD DNS authorization uses DNS-01 validation method + + PER_PROJECT_RECORD DNS authorization allows for independent management + of Google-managed certificates with DNS authorization across multiple + projects. 
+ immutable: true + values: + - :FIXED_RECORD + - :PER_PROJECT_RECORD + default_from_api: true - !ruby/object:Api::Type::NestedObject name: 'dnsResourceRecord' output: true diff --git a/mmv1/products/clouddeploy/Automation.yaml b/mmv1/products/clouddeploy/Automation.yaml index 6af88d3d88f4..bb46c92440bb 100644 --- a/mmv1/products/clouddeploy/Automation.yaml +++ b/mmv1/products/clouddeploy/Automation.yaml @@ -21,7 +21,7 @@ references: !ruby/object:Api::Resource::ReferenceLinks api: 'https://cloud.google.com/deploy/docs/api/reference/rest/v1/projects.locations.deliveryPipelines.automations' base_url: 'projects/{{project}}/locations/{{location}}/deliveryPipelines/{{delivery_pipeline}}/automations' self_link: 'projects/{{project}}/locations/{{location}}/deliveryPipelines/{{delivery_pipeline}}/automations/{{name}}' -min_version: beta + create_url: 'projects/{{project}}/locations/{{location}}/deliveryPipelines/{{delivery_pipeline}}/automations?automationId={{name}}' update_verb: :PATCH update_mask: true @@ -51,7 +51,6 @@ autogen_async: true examples: - !ruby/object:Provider::Terraform::Examples name: "clouddeploy_automation_basic" - min_version: beta primary_resource_id: "b-automation" vars: automation: "cd-automation" @@ -60,7 +59,7 @@ examples: service_account: :SERVICE_ACCT - !ruby/object:Provider::Terraform::Examples name: "clouddeploy_automation_full" - min_version: beta + primary_resource_id: "f-automation" vars: automation: "cd-automation" diff --git a/mmv1/products/clouddeploy/CustomTargetType.yaml b/mmv1/products/clouddeploy/CustomTargetType.yaml index d805ae20bbbd..49820c07a124 100644 --- a/mmv1/products/clouddeploy/CustomTargetType.yaml +++ b/mmv1/products/clouddeploy/CustomTargetType.yaml @@ -48,20 +48,28 @@ import_format: [ 'projects/{{project}}/locations/{{location}}/customTargetTypes/{{name}}' ] +iam_policy: !ruby/object:Api::Resource::IamPolicy + parent_resource_attribute: 'name' + method_name_separator: ':' + base_url: 
'projects/{{project}}/locations/{{location}}/customTargetTypes/{{name}}' + import_format: ['projects/{{project}}/locations/{{location}}/customTargetTypes/{{name}}', '{{name}}'] examples: - !ruby/object:Provider::Terraform::Examples name: "clouddeploy_custom_target_type_basic" primary_resource_id: "custom-target-type" + primary_resource_name: 'fmt.Sprintf("tf-test-my-custom-target-type%s", context["random_suffix"])' vars: custom_target_type_name: "my-custom-target-type" - !ruby/object:Provider::Terraform::Examples name: "clouddeploy_custom_target_type_git_skaffold_modules" primary_resource_id: "custom-target-type" + primary_resource_name: 'fmt.Sprintf("tf-test-my-custom-target-type%s", context["random_suffix"])' vars: custom_target_type_name: "my-custom-target-type" - !ruby/object:Provider::Terraform::Examples name: "clouddeploy_custom_target_type_gcs_skaffold_modules" primary_resource_id: "custom-target-type" + primary_resource_name: 'fmt.Sprintf("tf-test-my-custom-target-type%s", context["random_suffix"])' vars: custom_target_type_name: "my-custom-target-type" parameters: diff --git a/mmv1/products/clouddeploy/Target.yaml b/mmv1/products/clouddeploy/Target.yaml new file mode 100644 index 000000000000..b8ab9d6fd154 --- /dev/null +++ b/mmv1/products/clouddeploy/Target.yaml @@ -0,0 +1,40 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- !ruby/object:Api::Resource +name: 'Target' +description: | + The Cloud Deploy `Target` resource. 
+base_url: 'projects/{{project}}/locations/{{location}}/targets' +self_link: 'projects/{{project}}/locations/{{location}}/targets/{{name}}' +exclude_resource: true +id_format: 'projects/{{project}}/locations/{{location}}/targets/{{name}}' +import_format: + - 'projects/{{project}}/locations/{{location}}/targets/{{name}}' +iam_policy: !ruby/object:Api::Resource::IamPolicy + parent_resource_attribute: 'name' + method_name_separator: ':' + base_url: 'projects/{{project}}/locations/{{location}}/targets/{{name}}' + import_format: ['projects/{{project}}/locations/{{location}}/targets/{{name}}', '{{name}}'] +examples: + - !ruby/object:Provider::Terraform::Examples + name: 'clouddeploy_target_basic' + primary_resource_id: 'default' + primary_resource_name: 'fmt.Sprintf("tf-test-cd-target%s", context["random_suffix"])' + vars: + target: 'cd-target' +properties: + - !ruby/object:Api::Type::String + name: "name" + description: "Dummy property." + required: true diff --git a/mmv1/products/cloudrunv2/Service.yaml b/mmv1/products/cloudrunv2/Service.yaml index 1d4c3b7a0895..bbedcc2cf10c 100644 --- a/mmv1/products/cloudrunv2/Service.yaml +++ b/mmv1/products/cloudrunv2/Service.yaml @@ -256,6 +256,16 @@ properties: One or more custom audiences that you want this service to support. Specify each custom audience as the full URL in a string. The custom audiences are encoded in the token and used to authenticate requests. For more information, see https://cloud.google.com/run/docs/configuring/custom-audiences. item_type: Api::Type::String + - !ruby/object:Api::Type::NestedObject + name: 'scaling' + min_version: beta + description: | + Scaling settings that apply to the whole service + properties: + - !ruby/object:Api::Type::Integer + name: 'minInstanceCount' + description: | + Minimum number of instances for the service, to be divided among all revisions receiving traffic. 
- !ruby/object:Api::Type::NestedObject name: 'template' required: true @@ -430,7 +440,8 @@ properties: - !ruby/object:Api::Type::Boolean name: 'cpuIdle' description: |- - Determines whether CPU should be throttled or not outside of requests. + Determines whether CPU is only allocated during requests. True by default if the parent `resources` field is not set. However, if + `resources` is set, this field must be explicitly set to true to preserve the default behavior. - !ruby/object:Api::Type::Boolean name: 'startupCpuBoost' description: |- diff --git a/mmv1/products/compute/RegionAutoscaler.yaml b/mmv1/products/compute/RegionAutoscaler.yaml index 931e07443408..fa9991ee61aa 100644 --- a/mmv1/products/compute/RegionAutoscaler.yaml +++ b/mmv1/products/compute/RegionAutoscaler.yaml @@ -265,7 +265,6 @@ properties: required: true - !ruby/object:Api::Type::Double name: 'singleInstanceAssignment' - min_version: beta description: | If scaling is based on a per-group metric value that represents the total amount of work to be done or resource usage, set this value to @@ -341,7 +340,6 @@ properties: (if you are using gce_instance resource type). If multiple TimeSeries are returned upon the query execution, the autoscaler will sum their respective values to obtain its scaling value. 
- min_version: beta - !ruby/object:Api::Type::NestedObject name: 'loadBalancingUtilization' description: | diff --git a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml index 2dc6d82c64cc..177ce9001bf1 100644 --- a/mmv1/products/compute/RegionNetworkEndpointGroup.yaml +++ b/mmv1/products/compute/RegionNetworkEndpointGroup.yaml @@ -68,6 +68,11 @@ examples: primary_resource_id: 'appengine_neg' vars: neg_name: 'appengine-neg' + - !ruby/object:Provider::Terraform::Examples + name: 'region_network_endpoint_group_appengine_empty' + primary_resource_id: 'appengine_neg' + vars: + neg_name: 'appengine-neg' - !ruby/object:Provider::Terraform::Examples name: 'region_network_endpoint_group_psc' primary_resource_id: 'psc_neg' @@ -209,6 +214,7 @@ properties: - cloud_function - serverless_deployment allow_empty_object: true + send_empty_value: true description: | This field is only used for SERVERLESS NEGs. @@ -278,6 +284,7 @@ properties: - cloud_function - app_engine allow_empty_object: true + send_empty_value: true description: | This field is only used for SERVERLESS NEGs. 
diff --git a/mmv1/products/compute/RegionTargetHttpsProxy.yaml b/mmv1/products/compute/RegionTargetHttpsProxy.yaml index 723d03ad87af..553bd1a5655a 100644 --- a/mmv1/products/compute/RegionTargetHttpsProxy.yaml +++ b/mmv1/products/compute/RegionTargetHttpsProxy.yaml @@ -41,6 +41,9 @@ async: !ruby/object:Api::OpAsync error: !ruby/object:Api::OpAsync::Error path: 'error/errors' message: 'message' +custom_code: !ruby/object:Provider::Terraform::CustomCode + encoder: templates/terraform/encoders/compute_region_target_https_proxy.go.erb + decoder: templates/terraform/decoders/compute_region_target_https_proxy.go.erb examples: - !ruby/object:Provider::Terraform::Examples name: 'region_target_https_proxy_basic' @@ -51,6 +54,14 @@ examples: region_url_map_name: 'url-map' region_backend_service_name: 'backend-service' region_health_check_name: 'http-health-check' + - !ruby/object:Provider::Terraform::Examples + name: 'region_target_https_proxy_certificate_manager_certificate' + primary_resource_id: 'default' + vars: + region_target_https_proxy_name: 'target-http-proxy' + certificate_manager_certificate_name: 'my-certificate' + region_url_map_name: 'url-map' + region_backend_service_name: 'backend-service' parameters: - !ruby/object:Api::Type::ResourceRef name: 'region' @@ -109,13 +120,26 @@ properties: # update_verb: :POST # update_url: # 'projects/{{project}}/regions/{{region}}/targetHttpsProxies/{{name}}/setQuicOverride' + - !ruby/object:Api::Type::Array + name: 'certificateManagerCertificates' + description: | + URLs to certificate manager certificate resources that are used to authenticate connections between users and the load balancer. + Currently, you may specify up to 15 certificates. Certificate manager certificates do not apply when the load balancing scheme is set to INTERNAL_SELF_MANAGED. + sslCertificates and certificateManagerCertificates fields can not be defined together. 
+ Accepted format is `//certificatemanager.googleapis.com/projects/{project}/locations/{location}/certificates/{resourceName}` or just the self_link `projects/{project}/locations/{location}/certificates/{resourceName}` + update_verb: :POST + update_url: 'projects/{{project}}/regions/{{region}}/targetHttpsProxies/{{name}}/setSslCertificates' + item_type: Api::Type::String + custom_expand: 'templates/terraform/custom_expand/certificate_manager_certificate_construct_full_url.go.erb' + diff_suppress_func: 'tpgresource.CompareResourceNames' + conflicts: + - ssl_certificates - !ruby/object:Api::Type::Array name: 'sslCertificates' description: | - A list of RegionSslCertificate resources that are used to authenticate - connections between users and the load balancer. Currently, exactly - one SSL certificate must be specified. - required: true + URLs to SslCertificate resources that are used to authenticate connections between users and the load balancer. + At least one SSL certificate must be specified. Currently, you may specify up to 15 SSL certificates. + sslCertificates do not apply when the load balancing scheme is set to INTERNAL_SELF_MANAGED. 
update_verb: :POST update_url: 'projects/{{project}}/regions/{{region}}/targetHttpsProxies/{{name}}/setSslCertificates' item_type: !ruby/object:Api::Type::ResourceRef @@ -124,6 +148,8 @@ properties: imports: 'selfLink' description: 'The SSL certificates used by this TargetHttpsProxy' custom_expand: 'templates/terraform/custom_expand/array_resourceref_with_validation.go.erb' + conflicts: + - certificate_manager_certificates - !ruby/object:Api::Type::ResourceRef name: 'sslPolicy' resource: 'RegionSslPolicy' diff --git a/mmv1/products/compute/ServiceAttachment.yaml b/mmv1/products/compute/ServiceAttachment.yaml index f913840817a1..a7609767f815 100644 --- a/mmv1/products/compute/ServiceAttachment.yaml +++ b/mmv1/products/compute/ServiceAttachment.yaml @@ -68,6 +68,20 @@ examples: producer_forwarding_rule_name: 'producer-forwarding-rule' consumer_address_name: 'psc-ilb-consumer-address' consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' + - !ruby/object:Provider::Terraform::Examples + name: 'service_attachment_explicit_networks' + primary_resource_id: 'psc_ilb_service_attachment' + vars: + service_attachment_name: 'my-psc-ilb' + network_name: 'psc-ilb-network' + nat_subnetwork_name: 'psc-ilb-nat' + producer_subnetwork_name: 'psc-ilb-producer-subnetwork' + producer_health_check_name: 'producer-service-health-check' + producer_service_name: 'producer-service' + producer_forwarding_rule_name: 'producer-forwarding-rule' + consumer_network_name: 'psc-ilb-consumer-network' + consumer_address_name: 'psc-ilb-consumer-address' + consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' - !ruby/object:Provider::Terraform::Examples name: 'service_attachment_reconcile_connections' primary_resource_id: 'psc_ilb_service_attachment' @@ -82,6 +96,7 @@ examples: consumer_address_name: 'psc-ilb-consumer-address' consumer_forwarding_rule_name: 'psc-ilb-consumer-forwarding-rule' custom_code: !ruby/object:Provider::Terraform::CustomCode + constants: 
templates/terraform/constants/compute_service_attachment.go.erb update_encoder: 'templates/terraform/update_encoder/compute_service_attachment.go.erb' parameters: - !ruby/object:Api::Type::ResourceRef @@ -194,13 +209,22 @@ properties: attachment. send_empty_value: true is_set: true + set_hash_func: computeServiceAttachmentConsumerAcceptListsHash item_type: !ruby/object:Api::Type::NestedObject properties: - !ruby/object:Api::Type::String name: 'projectIdOrNum' - required: true + # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) description: | A project that is allowed to connect to this service attachment. + Only one of project_id_or_num and network_url may be set. + - !ruby/object:Api::Type::String + name: 'networkUrl' + # TODO (laurensknoll): add exactly_one_of when it can be applied to lists (https://github.com/hashicorp/terraform-plugin-sdk/issues/470) + description: | + The network that is allowed to connect to this service attachment. + Only one of project_id_or_num and network_url may be set. + diff_suppress_func: 'tpgresource.CompareSelfLinkRelativePaths' - !ruby/object:Api::Type::Integer name: 'connectionLimit' required: true diff --git a/mmv1/products/datafusion/go_instance.yaml b/mmv1/products/datafusion/go_instance.yaml new file mode 100644 index 000000000000..729cf2079292 --- /dev/null +++ b/mmv1/products/datafusion/go_instance.yaml @@ -0,0 +1,298 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'Instance' +description: | + Represents a Data Fusion instance. +references: + guides: + 'Official Documentation': 'https://cloud.google.com/data-fusion/docs/' + api: 'https://cloud.google.com/data-fusion/docs/reference/rest/v1beta1/projects.locations.instances' +docs: +base_url: 'projects/{{project}}/locations/{{region}}/instances' +create_url: 'projects/{{project}}/locations/{{region}}/instances?instanceId={{name}}' +update_verb: 'PATCH' +timeouts: + insert_minutes: 90 + update_minutes: 25 + delete_minutes: 50 +autogen_async: true +async: + operation: + base_url: '{{op_id}}' + path: 'name' + wait_ms: 1000 + result: + path: 'response' + resource_inside_response: true + error: + path: 'error' + message: 'message' +iam_policy: + method_name_separator: ':' + parent_resource_attribute: 'name' + import_format: + - 'projects/{{project}}/locations/{{location}}/instances/{{name}}' + - '{{name}}' +custom_code: + constants: 'templates/terraform/constants/data_fusion_instance_option.go.erb' + pre_update: 'templates/terraform/pre_update/datafusion_instance_update.go.erb' +examples: + - name: 'data_fusion_instance_basic' + primary_resource_id: 'basic_instance' + primary_resource_name: 'basic_instance' + vars: + instance_name: 'my-instance' + prober_test_run: '' + test_vars_overrides: + 'prober_test_run': '`options = { prober_test_run = "true" }`' + - name: 'data_fusion_instance_full' + primary_resource_id: 'extended_instance' + primary_resource_name: 'extended_instance' + vars: + instance_name: 'my-instance' + ip_alloc: 'datafusion-ip-alloc' + network_name: 'datafusion-full-network' + prober_test_run: '' + test_vars_overrides: + 'prober_test_run': '`options = { prober_test_run = "true" }`' + - name: 'data_fusion_instance_cmek' + primary_resource_id: 'cmek' + primary_resource_name: 'cmek' + 
vars: + instance_name: 'my-instance' + - name: 'data_fusion_instance_enterprise' + primary_resource_id: 'enterprise_instance' + primary_resource_name: 'enterprise_instance' + vars: + instance_name: 'my-instance' + prober_test_run: '' + test_vars_overrides: + 'prober_test_run': '`options = { prober_test_run = "true" }`' + - name: 'data_fusion_instance_event' + primary_resource_id: 'event' + primary_resource_name: 'event' + vars: + instance_name: 'my-instance' + - name: 'data_fusion_instance_zone' + primary_resource_id: 'zone' + primary_resource_name: 'zone' + vars: + instance_name: 'my-instance' +parameters: + - name: 'region' + type: String + description: "The region of the Data Fusion instance." + url_param_only: true + required: false + immutable: true + ignore_read: true + default_from_api: true +properties: + - name: 'name' + type: String + description: "The ID of the instance or a fully qualified identifier for the instance." + required: true + immutable: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' + custom_expand: 'templates/terraform/custom_expand/shortname_to_url.go.erb' + - name: 'description' + type: String + description: "An optional description of the instance." + immutable: true + - name: 'type' + type: Enum + description: "Represents the type of Data Fusion instance. Each type is configured with +the default settings for processing and memory. +- BASIC: Basic Data Fusion instance. In Basic type, the user will be able to create data pipelines +using point and click UI. However, there are certain limitations, such as fewer number +of concurrent pipelines, no support for streaming pipelines, etc. +- ENTERPRISE: Enterprise Data Fusion instance. In Enterprise type, the user will have more features +available, such as support for streaming pipelines, higher number of concurrent pipelines, etc. +- DEVELOPER: Developer Data Fusion instance. 
In Developer type, the user will have all features available but +with restrictive capabilities. This is to help enterprises design and develop their data ingestion and integration +pipelines at low cost." + required: true + immutable: true + enum_values: + - 'BASIC' + - 'ENTERPRISE' + - 'DEVELOPER' + - name: 'enableStackdriverLogging' + type: Boolean + description: "Option to enable Stackdriver Logging." + - name: 'enableStackdriverMonitoring' + type: Boolean + description: "Option to enable Stackdriver Monitoring." + - name: 'enableRbac' + type: Boolean + description: "Option to enable granular role-based access control." + - name: 'labels' + type: KeyValueLabels + description: "The resource labels for instance to use to annotate any related underlying resources, +such as Compute Engine VMs. + + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. +Please refer to the field `effective_labels` for all of the labels present on the resource." + immutable: false + - name: 'options' + type: KeyValuePairs + description: "Map of additional options used to configure the behavior of Data Fusion instance." + immutable: true + default_from_api: true + diff_suppress_func: 'instanceOptionsDiffSuppress' + - name: 'createTime' + type: String + description: "The time the instance was created in RFC3339 UTC 'Zulu' format, accurate to nanoseconds." + output: true + - name: 'updateTime' + type: String + description: "The time the instance was last updated in RFC3339 UTC 'Zulu' format, accurate to nanoseconds." + output: true + - name: 'state' + type: Enum + description: "The current state of this Data Fusion instance. 
+- CREATING: Instance is being created +- RUNNING: Instance is running and ready for requests +- FAILED: Instance creation failed +- DELETING: Instance is being deleted +- UPGRADING: Instance is being upgraded +- RESTARTING: Instance is being restarted" + output: true + enum_values: + - 'CREATING' + - 'RUNNING' + - 'FAILED' + - 'DELETING' + - 'UPGRADING' + - 'RESTARTING' + - name: 'stateMessage' + type: String + description: "Additional information about the current state of this Data Fusion instance if available." + output: true + - name: 'serviceEndpoint' + type: String + description: "Endpoint on which the Data Fusion UI and REST APIs are accessible." + output: true + - name: 'version' + type: String + description: "Current version of the Data Fusion." + default_from_api: true + - name: 'serviceAccount' + type: String + description: "Service account which will be used to access resources in the customer project." + min_version: 'beta' + output: true + deprecation_message: '`service_account` is deprecated and will be removed in a future major release. Instead, use `tenant_project_id` to extract the tenant project ID.' + - name: 'privateInstance' + type: Boolean + description: "Specifies whether the Data Fusion instance should be private. If set to +true, all Data Fusion nodes will have private IP addresses and will not be +able to access the public internet." + immutable: true + - name: 'dataprocServiceAccount' + type: String + description: "User-managed service account to set on Dataproc when Cloud Data Fusion creates Dataproc to run data processing pipelines." + immutable: true + - name: 'tenantProjectId' + type: String + description: "The name of the tenant project." + output: true + - name: 'gcsBucket' + type: String + description: "Cloud Storage bucket generated by Data Fusion in the customer project." + output: true + - name: 'networkConfig' + type: NestedObject + description: "Network configuration options. 
These are required when a private Data Fusion instance is to be created." + immutable: true + properties: + - name: 'ipAllocation' + type: String + description: "The IP range in CIDR notation to use for the managed Data Fusion instance + nodes. This range must not overlap with any other ranges used in the Data Fusion instance network." + required: true + immutable: true + - name: 'network' + type: String + description: "Name of the network in the project with which the tenant project + will be peered for executing pipelines. In case of shared VPC where the network resides in another host + project the network should specified in the form of projects/{host-project-id}/global/networks/{network}" + required: true + immutable: true + - name: 'zone' + type: String + description: "Name of the zone in which the Data Fusion instance will be created. Only DEVELOPER instances use this field." + immutable: true + default_from_api: true + - name: 'displayName' + type: String + description: "Display name for an instance." + immutable: true + - name: 'apiEndpoint' + type: String + description: "Endpoint on which the REST APIs is accessible." + output: true + - name: 'p4ServiceAccount' + type: String + description: "P4 service account for the customer project." + output: true + - name: 'cryptoKeyConfig' + type: NestedObject + description: "The crypto key configuration. This field is used by the Customer-Managed Encryption Keys (CMEK) feature." + immutable: true + properties: + - name: 'keyReference' + type: String + description: "The name of the key which is used to encrypt/decrypt customer data. For key in Cloud KMS, the key should be in the format of projects/*/locations/*/keyRings/*/cryptoKeys/*." + required: true + immutable: true + - name: 'eventPublishConfig' + type: NestedObject + description: "Option to enable and pass metadata for event publishing." + properties: + - name: 'enabled' + type: Boolean + description: "Option to enable Event Publishing." 
+        required: true
+      - name: 'topic'
+        type: String
+        description: "The resource name of the Pub/Sub topic. Format: projects/{projectId}/topics/{topic_id}"
+        required: true
+        immutable: true
+  - name: 'accelerators'
+    type: Array
+    description: "List of accelerators enabled for this CDF instance.
+
+If accelerators are enabled it is possible a permadiff will be created with the Options field.
+Users will need to either manually update their state file to include these diffed options, or include the field in a [lifecycle ignore changes block](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes)."
+    item_type: NestedObject
+    properties:
+      - name: 'acceleratorType'
+        type: Enum
+        description: "The type of an accelerator for a CDF instance."
+        required: true
+        enum_values:
+          - 'CDC'
+          - 'HEALTHCARE'
+          - 'CCAI_INSIGHTS'
+      - name: 'state'
+        type: Enum
+        description: "The state of an accelerator for a CDF instance."
+        required: true
+        enum_values:
+          - 'ENABLED'
+          - 'DISABLED'
diff --git a/mmv1/products/datafusion/go_product.yaml b/mmv1/products/datafusion/go_product.yaml
new file mode 100644
index 000000000000..69c8c197dddc
--- /dev/null
+++ b/mmv1/products/datafusion/go_product.yaml
@@ -0,0 +1,35 @@
+# Copyright 2024 Google Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+ +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'DataFusion' +display_name: 'Cloud Data Fusion' +versions: + - name: 'beta' + base_url: 'https://datafusion.googleapis.com/v1beta1/' + - name: 'ga' + base_url: 'https://datafusion.googleapis.com/v1/' +scopes: + - 'https://www.googleapis.com/auth/cloud-platform' +async: + operation: + base_url: '{{op_id}}' + path: 'name' + wait_ms: 1000 + result: + path: 'response' + resource_inside_response: true + error: + path: 'error' + message: 'message' diff --git a/mmv1/products/datapipeline/Pipeline.yaml b/mmv1/products/datapipeline/Pipeline.yaml index c3e24c9fcd9e..1bf821d94bff 100644 --- a/mmv1/products/datapipeline/Pipeline.yaml +++ b/mmv1/products/datapipeline/Pipeline.yaml @@ -31,6 +31,8 @@ examples: primary_resource_name: 'fmt.Sprintf("tf-test-my-pipeline%s", context["random_suffix"])' primary_resource_id: 'primary' + ignore_read_extra: + - 'schedule_info.0.next_job_time' vars: pipeline_name: 'my-pipeline' account_id: 'my-account' diff --git a/mmv1/products/firebaseappcheck/DeviceCheckConfig.yaml b/mmv1/products/firebaseappcheck/DeviceCheckConfig.yaml new file mode 100644 index 000000000000..3b708a93471d --- /dev/null +++ b/mmv1/products/firebaseappcheck/DeviceCheckConfig.yaml @@ -0,0 +1,97 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +--- +!ruby/object:Api::Resource +name: "DeviceCheckConfig" +base_url: projects/{{project}}/apps/{{app_id}}/deviceCheckConfig +self_link: projects/{{project}}/apps/{{app_id}}/deviceCheckConfig +create_url: projects/{{project}}/apps/{{app_id}}/deviceCheckConfig?updateMask=tokenTtl,keyId,privateKey +create_verb: :PATCH +update_verb: :PATCH +update_mask: true +skip_delete: true +description: | + An app's DeviceCheck configuration object. Note that the Team ID registered with your + app is used as part of the validation process. Make sure your `google_firebase_apple_app` has a team_id present. +references: !ruby/object:Api::Resource::ReferenceLinks + guides: + "Official Documentation": "https://firebase.google.com/docs/app-check" + api: "https://firebase.google.com/docs/reference/appcheck/rest/v1/projects.apps.deviceCheckConfig" +import_format: + [ + "projects/{{project}}/apps/{{app_id}}/deviceCheckConfig", + "{{project}}/{{app_id}}", + "{{app_id}}", + ] +examples: + - !ruby/object:Provider::Terraform::Examples + name: "firebase_app_check_device_check_config_full" + min_version: 'beta' + # Need the time_sleep resource + pull_external: true + primary_resource_id: "default" + vars: + bundle_id: "bundle.id.devicecheck" + key_id: "Key ID" + private_key_path: "path/to/private-key.p8" + team_id: "9987654321" + token_ttl: "7200s" + test_vars_overrides: + # Don't add random suffix + private_key_path: '"test-fixtures/private-key-2.p8"' + team_id: '"9987654321"' + token_ttl: '"7200s"' + test_env_vars: + project_id: :PROJECT_NAME +parameters: + - !ruby/object:Api::Type::String + name: app_id + description: | + The ID of an + [Apple App](https://firebase.google.com/docs/reference/firebase-management/rest/v1beta1/projects.iosApps#IosApp.FIELDS.app_id). 
+ required: true + immutable: true + url_param_only: true +properties: + - !ruby/object:Api::Type::String + name: name + description: | + The relative resource name of the DeviceCheck configuration object + output: true + - !ruby/object:Api::Type::String + name: tokenTtl + description: | + Specifies the duration for which App Check tokens exchanged from DeviceCheck artifacts will be valid. + If unset, a default value of 1 hour is assumed. Must be between 30 minutes and 7 days, inclusive. + + A duration in seconds with up to nine fractional digits, ending with 's'. Example: "3.5s". + default_from_api: true + - !ruby/object:Api::Type::String + name: keyId + description: | + The key identifier of a private key enabled with DeviceCheck, created in your Apple Developer account. + required: true + - !ruby/object:Api::Type::String + name: privateKey + description: | + The contents of the private key (.p8) file associated with the key specified by keyId. + required: true + sensitive: true + ignore_read: true + - !ruby/object:Api::Type::Boolean + name: privateKeySet + description: | + Whether the privateKey field was previously set. Since App Check will never return the + privateKey field, this field is the only way to find out whether it was previously set. 
+ output: true diff --git a/mmv1/products/firestore/Database.yaml b/mmv1/products/firestore/Database.yaml index 051872d7bc1a..d294f94c147a 100644 --- a/mmv1/products/firestore/Database.yaml +++ b/mmv1/products/firestore/Database.yaml @@ -78,6 +78,23 @@ examples: - project - etag - deletion_policy + - !ruby/object:Provider::Terraform::Examples + name: 'firestore_cmek_database' + min_version: beta + primary_resource_id: 'database' + vars: + database_id: "cmek-database-id" + delete_protection_state: "DELETE_PROTECTION_ENABLED" + kms_key_ring_name: "kms-key-ring" + kms_key_name: "kms-key" + test_env_vars: + project_id: :PROJECT_NAME + test_vars_overrides: + delete_protection_state: '"DELETE_PROTECTION_DISABLED"' + ignore_read_extra: + - project + - etag + - deletion_policy - !ruby/object:Provider::Terraform::Examples name: 'firestore_default_database_in_datastore_mode' primary_resource_id: 'datastore_mode_database' @@ -102,6 +119,23 @@ examples: - project - etag - deletion_policy + - !ruby/object:Provider::Terraform::Examples + name: 'firestore_cmek_database_in_datastore_mode' + min_version: beta + primary_resource_id: 'database' + vars: + database_id: "cmek-database-id" + delete_protection_state: "DELETE_PROTECTION_ENABLED" + kms_key_ring_name: "kms-key-ring" + kms_key_name: "kms-key" + test_env_vars: + project_id: :PROJECT_NAME + test_vars_overrides: + delete_protection_state: '"DELETE_PROTECTION_DISABLED"' + ignore_read_extra: + - project + - etag + - deletion_policy virtual_fields: - !ruby/object:Api::Type::Enum name: 'deletion_policy' @@ -234,3 +268,41 @@ properties: This value is continuously updated, and becomes stale the moment it is queried. If you are using this value to recover data, make sure to account for the time from the moment when the value is queried to the moment when you initiate the recovery. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. 
Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". output: true + - !ruby/object:Api::Type::NestedObject + name: cmekConfig + min_version: beta + immutable: true + description: | + The CMEK (Customer Managed Encryption Key) configuration for a Firestore + database. If not present, the database is secured by the default Google + encryption key. + properties: + - !ruby/object:Api::Type::String + name: kmsKeyName + required: true + immutable: true + description: | + The resource ID of a Cloud KMS key. If set, the database created will + be a Customer-managed Encryption Key (CMEK) database encrypted with + this key. This feature is allowlist only in initial launch. + + Only keys in the same location as this database are allowed to be used + for encryption. For Firestore's nam5 multi-region, this corresponds to Cloud KMS + multi-region us. For Firestore's eur3 multi-region, this corresponds to + Cloud KMS multi-region europe. See https://cloud.google.com/kms/docs/locations. + + This value should be the KMS key resource ID in the format of + `projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`. + How to retrive this resource ID is listed at + https://cloud.google.com/kms/docs/getting-resource-ids#getting_the_id_for_a_key_and_version. + - !ruby/object:Api::Type::Array + name: activeKeyVersion + output: true + description: | + Currently in-use KMS key versions (https://cloud.google.com/kms/docs/resource-hierarchy#key_versions). + During key rotation (https://cloud.google.com/kms/docs/key-rotation), there can be + multiple in-use key versions. + + The expected format is + `projects/{project_id}/locations/{kms_location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}/cryptoKeyVersions/{key_version}`. 
+ item_type: Api::Type::String diff --git a/mmv1/products/gkebackup/BackupPlan.yaml b/mmv1/products/gkebackup/BackupPlan.yaml index f3937b278d0c..26733710637e 100644 --- a/mmv1/products/gkebackup/BackupPlan.yaml +++ b/mmv1/products/gkebackup/BackupPlan.yaml @@ -105,7 +105,6 @@ examples: deletion_protection: 'false' test_env_vars: project: :PROJECT_NAME -skip_sweeper: true parameters: - !ruby/object:Api::Type::String name: 'location' diff --git a/mmv1/products/gkebackup/RestorePlan.yaml b/mmv1/products/gkebackup/RestorePlan.yaml index 479946268fda..38bcb5216b5c 100644 --- a/mmv1/products/gkebackup/RestorePlan.yaml +++ b/mmv1/products/gkebackup/RestorePlan.yaml @@ -134,7 +134,6 @@ examples: subnetwork_name: 'acctest.BootstrapSubnet(t, "gke-cluster", acctest.BootstrapSharedTestNetwork(t, "gke-cluster"))' oics_vars_overrides: deletion_protection: 'false' -skip_sweeper: true parameters: - !ruby/object:Api::Type::String name: 'location' diff --git a/mmv1/products/gkehub2/Scope.yaml b/mmv1/products/gkehub2/Scope.yaml index 7c12b64c03b4..ff73465a443e 100644 --- a/mmv1/products/gkehub2/Scope.yaml +++ b/mmv1/products/gkehub2/Scope.yaml @@ -117,6 +117,14 @@ properties: - :READY - :DELETING - :UPDATING + - !ruby/object:Api::Type::KeyValuePairs + name: 'namespaceLabels' + description: | + Scope-level cluster namespace labels. For the member clusters bound + to the Scope, these labels are applied to each namespace under the + Scope. Scope-level labels take precedence over Namespace-level + labels (`namespace_labels` in the Fleet Namespace resource) if they + share a key. Keys and values must be Kubernetes-conformant. 
- !ruby/object:Api::Type::KeyValueLabels name: 'labels' description: | diff --git a/mmv1/products/gkeonprem/VmwareCluster.yaml b/mmv1/products/gkeonprem/VmwareCluster.yaml index 26a32312ce27..0e2e265fac87 100644 --- a/mmv1/products/gkeonprem/VmwareCluster.yaml +++ b/mmv1/products/gkeonprem/VmwareCluster.yaml @@ -252,7 +252,8 @@ properties: description: vcenter_network specifies vCenter network name. Inherited from the admin cluster. - output: true + immutable: true + default_from_api: true - !ruby/object:Api::Type::NestedObject name: 'hostConfig' description: diff --git a/mmv1/products/kms/CryptoKeyVersion.yaml b/mmv1/products/kms/CryptoKeyVersion.yaml index b193fee87c02..4fc87f93a051 100644 --- a/mmv1/products/kms/CryptoKeyVersion.yaml +++ b/mmv1/products/kms/CryptoKeyVersion.yaml @@ -104,15 +104,18 @@ properties: description: | The certificate chains needed to validate the attestation properties: - - !ruby/object:Api::Type::String + - !ruby/object:Api::Type::Array + item_type: Api::Type::String name: 'caviumCerts' description: | Cavium certificate chain corresponding to the attestation. - - !ruby/object:Api::Type::String + - !ruby/object:Api::Type::Array + item_type: Api::Type::String name: 'googleCardCerts' description: | Google card certificate chain corresponding to the attestation. - - !ruby/object:Api::Type::String + - !ruby/object:Api::Type::Array + item_type: Api::Type::String name: 'googlePartitionCerts' description: | Google partition certificate chain corresponding to the attestation. diff --git a/mmv1/products/kms/EkmConnection.yaml b/mmv1/products/kms/EkmConnection.yaml new file mode 100644 index 000000000000..191bf70d852d --- /dev/null +++ b/mmv1/products/kms/EkmConnection.yaml @@ -0,0 +1,166 @@ +# Copyright 2023 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +--- !ruby/object:Api::Resource +name: 'EkmConnection' +base_url: 'projects/{{project}}/locations/{{location}}/ekmConnections' +create_url: 'projects/{{project}}/locations/{{location}}/ekmConnections?ekmConnectionId={{name}}' +self_link: 'projects/{{project}}/locations/{{location}}/ekmConnections/{{name}}' +update_verb: :PATCH +update_mask: true +description: | + `Ekm Connections` are used to control the connection settings for an `EXTERNAL_VPC` CryptoKey. + It is used to connect customer's external key manager to Google Cloud EKM. + + + ~> **Note:** Ekm Connections cannot be deleted from Google Cloud Platform. +references: !ruby/object:Api::Resource::ReferenceLinks + guides: + 'Creating a Ekm Connection': 'https://cloud.google.com/kms/docs/create-ekm-connection' + api: 'https://cloud.google.com/kms/docs/reference/rest/v1/projects.locations.ekmConnections' +id_format: 'projects/{{project}}/locations/{{location}}/ekmConnections/{{name}}' +import_format: ['projects/{{project}}/locations/{{location}}/ekmConnections/{{name}}'] +skip_delete: true +examples: + - !ruby/object:Provider::Terraform::Examples + name: 'kms_ekm_connection_basic' + primary_resource_id: + 'example-ekmconnection' + skip_test: true + vars: + ekmconnection_name: 'ekmconnection_example' +parameters: + - !ruby/object:Api::Type::String + name: 'location' + description: | + The location for the EkmConnection. + A full list of valid locations can be found by running `gcloud kms locations list`. 
+ required: true + ignore_read: true + url_param_only: true + immutable: true +properties: + - !ruby/object:Api::Type::String + name: 'name' + description: | + The resource name for the EkmConnection. + required: true + immutable: true + diff_suppress_func: 'tpgresource.CompareResourceNames' + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' + - !ruby/object:Api::Type::Array + name: 'serviceResolvers' + description: | + A list of ServiceResolvers where the EKM can be reached. There should be one ServiceResolver per EKM replica. Currently, only a single ServiceResolver is supported + required: true + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'serviceDirectoryService' + description: | + Required. The resource name of the Service Directory service pointing to an EKM replica, in the format projects/*/locations/*/namespaces/*/services/* + required: true + - !ruby/object:Api::Type::String + name: 'hostname' + description: | + Required. The hostname of the EKM replica used at TLS and HTTP layers. + required: true + - !ruby/object:Api::Type::Array + name: 'serverCertificates' + description: | + Required. A list of leaf server certificates used to authenticate HTTPS connections to the EKM replica. Currently, a maximum of 10 Certificate is supported. + required: true + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'rawDer' + description: | + Required. The raw certificate bytes in DER format. A base64-encoded string. + required: true + - !ruby/object:Api::Type::Boolean + name: 'parsed' + description: | + Output only. True if the certificate was parsed successfully. + output: true + - !ruby/object:Api::Type::String + name: 'issuer' + description: | + Output only. The issuer distinguished name in RFC 2253 format. Only present if parsed is true. 
+ output: true + - !ruby/object:Api::Type::String + name: 'subject' + description: | + Output only. The subject distinguished name in RFC 2253 format. Only present if parsed is true. + output: true + - !ruby/object:Api::Type::String + name: 'notBeforeTime' + description: | + Output only. The certificate is not valid before this time. Only present if parsed is true. + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true + - !ruby/object:Api::Type::String + name: 'notAfterTime' + description: | + Output only. The certificate is not valid after this time. Only present if parsed is true. + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true + - !ruby/object:Api::Type::String + name: 'sha256Fingerprint' + description: | + Output only. The SHA-256 certificate fingerprint as a hex string. Only present if parsed is true. + output: true + - !ruby/object:Api::Type::String + name: 'serialNumber' + description: | + Output only. The certificate serial number as a hex string. Only present if parsed is true. + output: true + - !ruby/object:Api::Type::Array + name: 'subjectAlternativeDnsNames' + description: | + Output only. The subject Alternative DNS names. Only present if parsed is true. + output: true + default_from_api: true + item_type: Api::Type::String + - !ruby/object:Api::Type::String + name: 'endpointFilter' + description: | + Optional. The filter applied to the endpoints of the resolved service. If no filter is specified, all endpoints will be considered. An endpoint will be chosen arbitrarily from the filtered list for each request. For endpoint filter syntax and examples, see https://cloud.google.com/service-directory/docs/reference/rpc/google.cloud.servicedirectory.v1#resolveservicerequest. 
+ required: false + default_from_api: true + - !ruby/object:Api::Type::Enum + name: 'keyManagementMode' + description: | + Optional. Describes who can perform control plane operations on the EKM. If unset, this defaults to MANUAL + required: false + default_value: :MANUAL + values: + - :MANUAL + - :CLOUD_KMS + - !ruby/object:Api::Type::String + name: 'etag' + required: false + default_from_api: true + description: | + Optional. Etag of the currently stored EkmConnection. + - !ruby/object:Api::Type::String + name: 'cryptoSpacePath' + description: | + Optional. Identifies the EKM Crypto Space that this EkmConnection maps to. Note: This field is required if KeyManagementMode is CLOUD_KMS. + required: false + default_from_api: true + - !ruby/object:Api::Type::String + name: 'createTime' + description: | + Output only. The time at which the EkmConnection was created. + A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z". + output: true diff --git a/mmv1/products/pubsub/go_Schema.yaml b/mmv1/products/pubsub/go_Schema.yaml new file mode 100644 index 000000000000..edcdbbd7d7d5 --- /dev/null +++ b/mmv1/products/pubsub/go_Schema.yaml @@ -0,0 +1,84 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'Schema' +description: | + A schema is a format that messages must follow, + creating a contract between publisher and subscriber that Pub/Sub will enforce. +references: + guides: + 'Creating and managing schemas': 'https://cloud.google.com/pubsub/docs/schemas' + api: 'https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.schemas' +docs: +base_url: 'projects/{{project}}/schemas' +create_url: 'projects/{{project}}/schemas?schemaId={{name}}' +update_url: 'projects/{{project}}/schemas/{{name}}:commit' +update_verb: 'POST' +update_mask: false +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + check_response_func_existence: 'transport_tpg.PollCheckForExistence' + check_response_func_absence: 'transport_tpg.PollCheckForAbsence' + suppress_error: false + target_occurrences: 10 +iam_policy: + method_name_separator: ':' + parent_resource_attribute: 'schema' +custom_code: + update_encoder: 'templates/terraform/update_encoder/pubsub_schema.erb' +examples: + - name: 'pubsub_schema_basic' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + schema_name: 'example-schema' + - name: 'pubsub_schema_protobuf' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + schema_name: 'example' + test_env_vars: + project_name: 'PROJECT_NAME' +parameters: + - name: 'name' + type: String + description: "The ID to use for the schema, which will become the final component of the schema's resource name." 
+ required: true + immutable: true + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' + custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.erb' +properties: + - name: 'type' + type: Enum + description: "The type of the schema definition" + default_value: TYPE_UNSPECIFIED + enum_values: + - 'TYPE_UNSPECIFIED' + - 'PROTOCOL_BUFFER' + - 'AVRO' + - '' + - name: 'definition' + type: String + description: "The definition of the schema. +This should contain a string representing the full definition of the schema +that is a valid schema definition of the type specified in type. Changes +to the definition commit new [schema revisions](https://cloud.google.com/pubsub/docs/commit-schema-revision). +A schema can only have up to 20 revisions, so updates that fail with an +error indicating that the limit has been reached require manually +[deleting old revisions](https://cloud.google.com/pubsub/docs/delete-schema-revision)." diff --git a/mmv1/products/pubsub/go_Subscription.yaml b/mmv1/products/pubsub/go_Subscription.yaml new file mode 100644 index 000000000000..ad7bd11d8ddd --- /dev/null +++ b/mmv1/products/pubsub/go_Subscription.yaml @@ -0,0 +1,420 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'Subscription' +description: | + A named resource representing the stream of messages from a single, + specific topic, to be delivered to the subscribing application. +references: + guides: + 'Managing Subscriptions': 'https://cloud.google.com/pubsub/docs/admin#managing_subscriptions' + api: 'https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions' +docs: + note: 'You can retrieve the email of the Google Managed Pub/Sub Service Account used for forwarding +by using the `google_project_service_identity` resource. +' +base_url: 'projects/{{project}}/subscriptions' +create_verb: 'PUT' +update_url: 'projects/{{project}}/subscriptions/{{name}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + check_response_func_existence: 'transport_tpg.PollCheckForExistence' + check_response_func_absence: 'transport_tpg.PollCheckForAbsence' + suppress_error: true + target_occurrences: 1 +custom_code: + constants: 'templates/terraform/constants/subscription.go.erb' + encoder: 'templates/terraform/encoders/no_send_name.go.erb' + update_encoder: 'templates/terraform/update_encoder/pubsub_subscription.erb' +examples: + - name: 'pubsub_subscription_push' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + - name: 'pubsub_subscription_pull' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + - name: 'pubsub_subscription_dead_letter' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + - name: 'pubsub_subscription_push_bq' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 
'example-topic' + subscription_name: 'example-subscription' + dataset_id: 'example_dataset' + table_id: 'example_table' + - name: 'pubsub_subscription_push_bq_table_schema' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + dataset_id: 'example_dataset' + table_id: 'example_table' + - name: 'pubsub_subscription_push_cloudstorage' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + bucket_name: 'example-bucket' + - name: 'pubsub_subscription_push_cloudstorage_avro' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + bucket_name: 'example-bucket' +parameters: +properties: + - name: 'name' + type: String + description: "Name of the subscription." + pattern: 'projects/{{project}}/subscriptions/{{name}}' + required: true + immutable: true + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' + custom_expand: 'templates/terraform/custom_expand/shortname_to_url.go.erb' + - name: 'topic' + type: ResourceRef + description: "A reference to a Topic resource, of the form projects/{project}/topics/{{name}} +(as in the id property of a google_pubsub_topic), or just a topic name if +the topic is in the same project as the subscription." + pattern: 'projects/{{project}}/topics/{{topic}}' + required: true + immutable: true + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + custom_expand: 'templates/terraform/custom_expand/computed_subscription_topic.erb' + resource: 'Topic' + imports: 'name' + - name: 'labels' + type: KeyValueLabels + description: "A set of key/value label pairs to assign to this Subscription. + + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. 
+Please refer to the field `effective_labels` for all of the labels present on the resource." + immutable: false + - name: 'bigqueryConfig' + type: NestedObject + description: "If delivery to BigQuery is used with this subscription, this field is used to configure it. +Either pushConfig, bigQueryConfig or cloudStorageConfig can be set, but not combined. +If all three are empty, then the subscriber will pull and ack messages using API methods." + conflicts: + - push_config + - cloud_storage_config + properties: + - name: 'table' + type: String + description: "The name of the table to which to write data, of the form {projectId}:{datasetId}.{tableId}" + required: true + - name: 'useTopicSchema' + type: Boolean + description: "When true, use the topic's schema as the columns to write to in BigQuery, if it exists. + Only one of use_topic_schema and use_table_schema can be set." + conflicts: + - use_table_schema + - name: 'useTableSchema' + type: Boolean + description: "When true, use the BigQuery table's schema as the columns to write to in BigQuery. Messages + must be published in JSON format. Only one of use_topic_schema and use_table_schema can be set." + conflicts: + - use_topic_schema + - name: 'writeMetadata' + type: Boolean + description: "When true, write the subscription name, messageId, publishTime, attributes, and orderingKey to additional columns in the table. + The subscription name, messageId, and publishTime fields are put in their own columns while all other message properties (other than data) are written to a JSON object in the attributes column." + - name: 'dropUnknownFields' + type: Boolean + description: "When true and use_topic_schema or use_table_schema is true, any fields that are a part of the topic schema or message schema that + are not part of the BigQuery table schema are dropped when writing to BigQuery. 
Otherwise, the schemas must be kept in sync + and any messages with extra fields are not written and remain in the subscription's backlog." + - name: 'cloudStorageConfig' + type: NestedObject + description: "If delivery to Cloud Storage is used with this subscription, this field is used to configure it. +Either pushConfig, bigQueryConfig or cloudStorageConfig can be set, but not combined. +If all three are empty, then the subscriber will pull and ack messages using API methods." + conflicts: + - push_config + - bigquery_config + properties: + - name: 'bucket' + type: String + description: "User-provided name for the Cloud Storage bucket. The bucket must be created by the user. The bucket name must be without any prefix like 'gs://'." + required: true + - name: 'filenamePrefix' + type: String + description: "User-provided prefix for Cloud Storage filename." + - name: 'filenameSuffix' + type: String + description: "User-provided suffix for Cloud Storage filename. Must not end in '/'." + - name: 'maxDuration' + type: String + description: "The maximum duration that can elapse before a new Cloud Storage file is created. Min 1 minute, max 10 minutes, default 5 minutes. + May not exceed the subscription's acknowledgement deadline. + A duration in seconds with up to nine fractional digits, ending with 's'. Example: '3.5s'." + default_value: 300s + - name: 'maxBytes' + type: Integer + description: "The maximum bytes that can be written to a Cloud Storage file before a new file is created. Min 1 KB, max 10 GiB. + The maxBytes limit may be exceeded in cases where messages are larger than the limit." + - name: 'state' + type: Enum + description: "An output-only field that indicates whether or not the subscription can receive messages." + output: true + enum_values: + - 'ACTIVE' + - 'PERMISSION_DENIED' + - 'NOT_FOUND' + - name: 'avroConfig' + type: NestedObject + description: "If set, message data will be written to Cloud Storage in Avro format." 
+ properties: + - name: 'writeMetadata' + type: Boolean + description: "When true, write the subscription name, messageId, publishTime, attributes, and orderingKey as additional fields in the output." + - name: 'pushConfig' + type: NestedObject + description: "If push delivery is used with this subscription, this field is used to +configure it. An empty pushConfig signifies that the subscriber will +pull and ack messages using API methods." + conflicts: + - bigquery_config + - cloud_storage_config + properties: + - name: 'oidcToken' + type: NestedObject + description: "If specified, Pub/Sub will generate and attach an OIDC JWT token as + an Authorization header in the HTTP request for every pushed message." + properties: + - name: 'serviceAccountEmail' + type: String + description: "Service account email to be used for generating the OIDC token. + The caller (for subscriptions.create, subscriptions.patch, and + subscriptions.modifyPushConfig RPCs) must have the + iam.serviceAccounts.actAs permission for the service account." + required: true + - name: 'audience' + type: String + description: "Audience to be used when generating OIDC token. The audience claim + identifies the recipients that the JWT is intended for. The audience + value is a single case-sensitive string. Having multiple values (array) + for the audience field is not supported. More info about the OIDC JWT + token audience here: https://tools.ietf.org/html/rfc7519#section-4.1.3 + Note: if not specified, the Push endpoint URL will be used." + - name: 'pushEndpoint' + type: String + description: "A URL locating the endpoint to which messages should be pushed. + For example, a Webhook endpoint might use + 'https://example.com/push'." + required: true + - name: 'attributes' + type: KeyValuePairs + description: "Endpoint configuration attributes. + + Every endpoint has a set of API supported attributes that can + be used to control different aspects of the message delivery. 
+ + The currently supported attribute is x-goog-version, which you + can use to change the format of the pushed message. This + attribute indicates the version of the data expected by + the endpoint. This controls the shape of the pushed message + (i.e., its fields and metadata). The endpoint version is + based on the version of the Pub/Sub API. + + If not present during the subscriptions.create call, + it will default to the version of the API used to make + such call. If not present during a subscriptions.modifyPushConfig + call, its value will not be changed. subscriptions.get + calls will always return a valid version, even if the + subscription was created without this attribute. + + The possible values for this attribute are: + + - v1beta1: uses the push format defined in the v1beta1 Pub/Sub API. + - v1 or v1beta2: uses the push format defined in the v1 Pub/Sub API." + diff_suppress_func: 'tpgresource.IgnoreMissingKeyInMap("x-goog-version")' + - name: 'noWrapper' + type: NestedObject + description: "When set, the payload to the push endpoint is not wrapped.Sets the + `data` field as the HTTP body for delivery." + custom_flatten: 'templates/terraform/custom_flatten/pubsub_no_wrapper_write_metadata_flatten.go.erb' + properties: + - name: 'writeMetadata' + type: Boolean + description: "When true, writes the Pub/Sub message metadata to + `x-goog-pubsub-:` headers of the HTTP request. Writes the + Pub/Sub message attributes to `:` headers of the HTTP request." + required: true + send_empty_value: true + - name: 'ackDeadlineSeconds' + type: Integer + description: "This value is the maximum time after a subscriber receives a message +before the subscriber should acknowledge the message. After message +delivery but before the ack deadline expires and before the message is +acknowledged, it is an outstanding message and will not be delivered +again during that time (on a best-effort basis). 
+ +For pull subscriptions, this value is used as the initial value for +the ack deadline. To override this value for a given message, call +subscriptions.modifyAckDeadline with the corresponding ackId if using +pull. The minimum custom deadline you can specify is 10 seconds. The +maximum custom deadline you can specify is 600 seconds (10 minutes). +If this parameter is 0, a default value of 10 seconds is used. + +For push delivery, this value is also used to set the request timeout +for the call to the push endpoint. + +If the subscriber never acknowledges the message, the Pub/Sub system +will eventually redeliver the message." + default_from_api: true + - name: 'messageRetentionDuration' + type: String + description: "How long to retain unacknowledged messages in the subscription's +backlog, from the moment a message is published. If +retain_acked_messages is true, then this also configures the retention +of acknowledged messages, and thus configures how far back in time a +subscriptions.seek can be done. Defaults to 7 days. Cannot be more +than 7 days (`'604800s'`) or less than 10 minutes (`'600s'`). + +A duration in seconds with up to nine fractional digits, terminated +by 's'. Example: `'600.5s'`." + default_value: 604800s + - name: 'retainAckedMessages' + type: Boolean + description: "Indicates whether to retain acknowledged messages. If `true`, then +messages are not expunged from the subscription's backlog, even if +they are acknowledged, until they fall out of the +messageRetentionDuration window." + - name: 'expirationPolicy' + type: NestedObject + description: "A policy that specifies the conditions for this subscription's expiration. +A subscription is considered active as long as any connected subscriber +is successfully consuming messages from the subscription or is issuing +operations on the subscription. If expirationPolicy is not set, a default +policy with ttl of 31 days will be used. If it is set but ttl is '', the +resource never expires. 
The minimum allowed value for expirationPolicy.ttl +is 1 day." + default_from_api: true + send_empty_value: true + allow_empty_object: true + properties: + - name: 'ttl' + type: String + description: "Specifies the 'time-to-live' duration for an associated resource. The + resource expires if it is not active for a period of ttl. + If ttl is set to '', the associated resource never expires. + A duration in seconds with up to nine fractional digits, terminated by 's'. + Example - '3.5s'." + required: true + diff_suppress_func: 'comparePubsubSubscriptionExpirationPolicy' + - name: 'filter' + type: String + description: "The subscription only delivers the messages that match the filter. +Pub/Sub automatically acknowledges the messages that don't match the filter. You can filter messages +by their attributes. The maximum length of a filter is 256 bytes. After creating the subscription, +you can't modify the filter." + required: false + immutable: true + - name: 'deadLetterPolicy' + type: NestedObject + description: "A policy that specifies the conditions for dead lettering messages in +this subscription. If dead_letter_policy is not set, dead lettering +is disabled. + +The Cloud Pub/Sub service account associated with this subscription's +parent project (i.e., +service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have +permission to Acknowledge() messages on this subscription." + send_empty_value: true + properties: + - name: 'deadLetterTopic' + type: String + description: "The name of the topic to which dead letter messages should be published. + Format is `projects/{project}/topics/{topic}`. + + The Cloud Pub/Sub service account associated with the enclosing subscription's + parent project (i.e., + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com) must have + permission to Publish() to this topic. + + The operation will fail if the topic does not exist. 
+ Users should ensure that there is a subscription attached to this topic + since messages published to a topic with no subscriptions are lost." + - name: 'maxDeliveryAttempts' + type: Integer + description: "The maximum number of delivery attempts for any message. The value must be + between 5 and 100. + + The number of delivery attempts is defined as 1 + (the sum of number of + NACKs and number of times the acknowledgement deadline has been exceeded for the message). + + A NACK is any call to ModifyAckDeadline with a 0 deadline. Note that + client libraries may automatically extend ack_deadlines. + + This field will be honored on a best effort basis. + + If this parameter is 0, a default value of 5 is used." + - name: 'retryPolicy' + type: NestedObject + description: "A policy that specifies how Pub/Sub retries message delivery for this subscription. + +If not set, the default retry policy is applied. This generally implies that messages will be retried as soon as possible for healthy subscribers. +RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded events for a given message" + properties: + - name: 'minimumBackoff' + type: String + description: "The minimum delay between consecutive deliveries of a given message. Value should be between 0 and 600 seconds. Defaults to 10 seconds. + A duration in seconds with up to nine fractional digits, terminated by 's'. Example: '3.5s'." + default_from_api: true + diff_suppress_func: 'tpgresource.DurationDiffSuppress' + - name: 'maximumBackoff' + type: String + description: "The maximum delay between consecutive deliveries of a given message. Value should be between 0 and 600 seconds. Defaults to 600 seconds. + A duration in seconds with up to nine fractional digits, terminated by 's'. Example: '3.5s'." 
+ default_from_api: true + diff_suppress_func: 'tpgresource.DurationDiffSuppress' + - name: 'enableMessageOrdering' + type: Boolean + description: "If `true`, messages published with the same orderingKey in PubsubMessage will be delivered to +the subscribers in the order in which they are received by the Pub/Sub system. Otherwise, they +may be delivered in any order." + immutable: true + - name: 'enableExactlyOnceDelivery' + type: Boolean + description: "If `true`, Pub/Sub provides the following guarantees for the delivery +of a message with a given value of messageId on this Subscriptions': + +- The message sent to a subscriber is guaranteed not to be resent before the message's acknowledgement deadline expires. + +- An acknowledged message will not be resent to a subscriber. + +Note that subscribers may still receive multiple copies of a message when `enable_exactly_once_delivery` +is true if the message was published multiple times by a publisher client. These copies are considered distinct by Pub/Sub and have distinct messageId values" diff --git a/mmv1/products/pubsub/go_Topic.yaml b/mmv1/products/pubsub/go_Topic.yaml new file mode 100644 index 000000000000..17cddf8c9ea7 --- /dev/null +++ b/mmv1/products/pubsub/go_Topic.yaml @@ -0,0 +1,150 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'Topic' +description: | + A named resource to which messages are sent by publishers. +references: + guides: + 'Managing Topics': 'https://cloud.google.com/pubsub/docs/admin#managing_topics' + api: 'https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.topics' +docs: + note: 'You can retrieve the email of the Google Managed Pub/Sub Service Account used for forwarding +by using the `google_project_service_identity` resource. +' +base_url: 'projects/{{project}}/topics' +create_verb: 'PUT' +update_url: 'projects/{{project}}/topics/{{name}}' +update_verb: 'PATCH' +update_mask: true +timeouts: + insert_minutes: 20 + update_minutes: 20 + delete_minutes: 20 +async: + check_response_func_existence: 'transport_tpg.PollCheckForExistence' + check_response_func_absence: 'transport_tpg.PollCheckForAbsence' + suppress_error: true + target_occurrences: 1 +iam_policy: + method_name_separator: ':' + parent_resource_attribute: 'topic' +custom_code: + encoder: 'templates/terraform/encoders/no_send_name.go.erb' + update_encoder: 'templates/terraform/update_encoder/pubsub_topic.erb' +error_retry_predicates: + + - 'transport_tpg.PubsubTopicProjectNotReady' +examples: + - name: 'pubsub_topic_basic' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + - name: 'pubsub_topic_cmek' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + key_name: 'example-key' + keyring_name: 'example-keyring' + skip_test: true + - name: 'pubsub_topic_geo_restricted' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + - name: 'pubsub_topic_schema_settings' + primary_resource_id: 'example' + primary_resource_name: 'example' + vars: + topic_name: 'example-topic' + schema_name: 'example' + test_env_vars: + project_name: 'PROJECT_NAME' +parameters: 
+properties: + - name: 'name' + type: String + description: "Name of the topic." + pattern: 'projects/{{project}}/topics/{{name}}' + required: true + immutable: true + diff_suppress_func: 'tpgresource.CompareSelfLinkOrResourceName' + custom_flatten: 'templates/terraform/custom_flatten/name_from_self_link.erb' + custom_expand: 'templates/terraform/custom_expand/resource_from_self_link.go.erb' + - name: 'kmsKeyName' + type: String + description: "The resource name of the Cloud KMS CryptoKey to be used to protect access +to messages published on this topic. Your project's PubSub service account +(`service-{{PROJECT_NUMBER}}@gcp-sa-pubsub.iam.gserviceaccount.com`) must have +`roles/cloudkms.cryptoKeyEncrypterDecrypter` to use this feature. +The expected format is `projects/*/locations/*/keyRings/*/cryptoKeys/*`" + - name: 'labels' + type: KeyValueLabels + description: "A set of key/value label pairs to assign to this Topic. + + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. +Please refer to the field `effective_labels` for all of the labels present on the resource." + immutable: false + - name: 'messageStoragePolicy' + type: NestedObject + description: "Policy constraining the set of Google Cloud Platform regions where +messages published to the topic may be stored. If not present, then no +constraints are in effect." + default_from_api: true + properties: + - name: 'allowedPersistenceRegions' + type: Array + description: "A list of IDs of GCP regions where messages that are published to + the topic may be persisted in storage. Messages published by + publishers running in non-allowed GCP regions (or running outside + of GCP altogether) will be routed for storage in one of the + allowed regions. An empty list means that no regions are allowed, + and is not a valid configuration." 
+ required: true + item_type: Api::Type::String + - name: 'schemaSettings' + type: NestedObject + description: "Settings for validating messages published against a schema." + default_from_api: true + properties: + - name: 'schema' + type: String + description: "The name of the schema that messages published should be + validated against. Format is projects/{project}/schemas/{schema}. + The value of this field will be _deleted-schema_ + if the schema has been deleted." + required: true + - name: 'encoding' + type: Enum + description: "The encoding of messages validated against schema." + default_value: ENCODING_UNSPECIFIED + enum_values: + - 'ENCODING_UNSPECIFIED' + - 'JSON' + - 'BINARY' + - '' + - name: 'messageRetentionDuration' + type: String + description: "Indicates the minimum duration to retain a message after it is published +to the topic. If this field is set, messages published to the topic in +the last messageRetentionDuration are always available to subscribers. +For instance, it allows any attached subscription to seek to a timestamp +that is up to messageRetentionDuration in the past. If this field is not +set, message retention is controlled by settings on individual subscriptions. +The rotation period has the format of a decimal number, followed by the +letter `s` (seconds). Cannot be more than 31 days or less than 10 minutes." diff --git a/mmv1/products/pubsub/go_product.yaml b/mmv1/products/pubsub/go_product.yaml new file mode 100644 index 000000000000..f3f63389edd2 --- /dev/null +++ b/mmv1/products/pubsub/go_product.yaml @@ -0,0 +1,22 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Warning: This is a temporary file, and should not be edited directly +--- +name: 'Pubsub' +display_name: 'Cloud Pub/Sub' +versions: + - name: 'ga' + base_url: 'https://pubsub.googleapis.com/v1/' +scopes: + - 'https://www.googleapis.com/auth/pubsub' diff --git a/mmv1/products/secretmanager/SecretVersion.yaml b/mmv1/products/secretmanager/SecretVersion.yaml index 0161b2e6d0d3..9db3d9e31bcb 100644 --- a/mmv1/products/secretmanager/SecretVersion.yaml +++ b/mmv1/products/secretmanager/SecretVersion.yaml @@ -20,6 +20,7 @@ delete_url: '{{name}}:destroy' delete_verb: :POST description: | A secret version resource. +# Sweeper skipped as this resource has customized deletion. skip_sweeper: true examples: - !ruby/object:Provider::Terraform::Examples diff --git a/mmv1/products/spanner/Database.yaml b/mmv1/products/spanner/Database.yaml index 9feb815c7cb9..ea33c9ad211c 100644 --- a/mmv1/products/spanner/Database.yaml +++ b/mmv1/products/spanner/Database.yaml @@ -41,6 +41,7 @@ async: !ruby/object:Api::OpAsync path: 'error' message: 'message' autogen_async: true +# Sweeper skipped as this resource has customized deletion. skip_sweeper: true id_format: '{{instance}}/{{name}}' import_format: diff --git a/mmv1/products/sql/Database.yaml b/mmv1/products/sql/Database.yaml index 1c6c56a9ed14..eca5f0a5c533 100644 --- a/mmv1/products/sql/Database.yaml +++ b/mmv1/products/sql/Database.yaml @@ -30,6 +30,7 @@ import_format: '{{instance}}/{{name}}', '{{name}}', ] +# Sweeper skipped as this resource has customized deletion. 
skip_sweeper: true examples: - !ruby/object:Provider::Terraform::Examples diff --git a/mmv1/products/workstations/WorkstationConfig.yaml b/mmv1/products/workstations/WorkstationConfig.yaml index 624e5593fd20..39fada27bca8 100644 --- a/mmv1/products/workstations/WorkstationConfig.yaml +++ b/mmv1/products/workstations/WorkstationConfig.yaml @@ -369,6 +369,48 @@ properties: description: | Name of the snapshot to use as the source for the disk. This can be the snapshot's `self_link`, `id`, or a string in the format of `projects/{project}/global/snapshots/{snapshot}`. If set, `sizeGb` and `fsType` must be empty. Can only be updated if it has an existing value. # TODO(esu): Add conflicting fields once complex lists are supported. + - !ruby/object:Api::Type::Array + name: 'ephemeralDirectories' + description: | + Ephemeral directories which won't persist across workstation sessions. + default_from_api: true + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::String + name: 'mountPath' + description: | + Location of this directory in the running workstation. + default_from_api: true + - !ruby/object:Api::Type::NestedObject + name: 'gcePd' + description: | + An EphemeralDirectory backed by a Compute Engine persistent disk. + default_from_api: true + properties: + - !ruby/object:Api::Type::String + name: 'diskType' + description: | + Type of the disk to use. Defaults to `"pd-standard"`. + default_from_api: true + - !ruby/object:Api::Type::String + name: 'sourceSnapshot' + description: | + Name of the snapshot to use as the source for the disk. + + Must be empty if `sourceImage` is set. + Must be empty if `read_only` is false. + Updating `source_snapshot` will update content in the ephemeral directory after the workstation is restarted. + - !ruby/object:Api::Type::String + name: 'sourceImage' + description: | + Name of the disk image to use as the source for the disk. + + Must be empty if `sourceSnapshot` is set. 
+ Updating `sourceImage` will update content in the ephemeral directory after the workstation is restarted. + - !ruby/object:Api::Type::Boolean + name: 'readOnly' + description: | + Whether the disk is read only. If true, the disk may be shared by multiple VMs and `sourceSnapshot` must be set. - !ruby/object:Api::Type::NestedObject name: 'container' description: | diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go new file mode 100644 index 000000000000..fb620c1ddce5 --- /dev/null +++ b/mmv1/provider/terraform.go @@ -0,0 +1,892 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package provider + +import ( + "log" + + "github.com/GoogleCloudPlatform/magic-modules/mmv1/api" +) + +const TERRAFORM_PROVIDER_GA = "github.com/hashicorp/terraform-provider-google" +const TERRAFORM_PROVIDER_BETA = "github.com/hashicorp/terraform-provider-google-beta" +const TERRAFORM_PROVIDER_PRIVATE = "internal/terraform-next" +const RESOURCE_DIRECTORY_GA = "google" +const RESOURCE_DIRECTORY_BETA = "google-beta" +const RESOURCE_DIRECTORY_PRIVATE = "google-private" + +type Terraform struct { + ResourceCount int + + IAMResourceCount int + + ResourcesForVersion []api.Resource +} + +func NewTerraform(product *api.Product) *Terraform { + t := Terraform{ResourceCount: 0, IAMResourceCount: 0} + + // TODO Q1 + // @target_version_name = version_name + // + // @version = @api.version_obj_or_closest(version_name) + // @api.set_properties_based_on_version(@version) + + return &t +} + +// +// # Main entry point for generation. +// def generate(output_folder, types, product_path, dump_yaml, generate_code, generate_docs) + +// end + +func (t *Terraform) Generate(outputFolder, productPath string, generateCode, generateDocs bool) { + log.Printf("Generate function called with %s %s %t %t", outputFolder, productPath, generateCode, generateDocs) + + // TODO Q1 + // generate_objects(output_folder, types, generate_code, generate_docs) + // + // FileUtils.mkpath output_folder + // pwd = Dir.pwd + // if generate_code + // Dir.chdir output_folder + // + // generate_operation(pwd, output_folder, types) + // Dir.chdir pwd + // end + // + // # Write a file with the final version of the api, after overrides + // # have been applied. + // return unless dump_yaml + // + // raise 'Path to output the final yaml was not specified.' \ + // if product_path.nil? 
|| product_path == '' + // + // File.open("#{product_path}/final_api.yaml", 'w') do |file| + // file.write("# This is a generated file, its contents will be overwritten.\n") + // file.write(YAML.dump(@api)) + // end +} + +// +// # generate_code and generate_docs are actually used because all of the variables +// # in scope in this method are made available within the templates by the compile call. +// # rubocop:disable Lint/UnusedMethodArgument +// def copy_common_files(output_folder, generate_code, generate_docs, provider_name = nil) +// # version_name is actually used because all of the variables in scope in this method +// # are made available within the templates by the compile call. +// # TODO: remove version_name, use @target_version_name or pass it in explicitly +// # rubocop:disable Lint/UselessAssignment +// version_name = @target_version_name +// # rubocop:enable Lint/UselessAssignment +// provider_name ||= self.class.name.split('::').last.downcase +// return unless File.exist?("provider/#{provider_name}/common~copy.yaml") +// +// Google::LOGGER.info "Copying common files for #{provider_name}" +// files = YAML.safe_load(compile("provider/#{provider_name}/common~copy.yaml")) +// copy_file_list(output_folder, files) +// end +// # rubocop:enable Lint/UnusedMethodArgument +// +// def copy_file_list(output_folder, files) +// files.map do |target, source| +// Thread.new do +// target_file = File.join(output_folder, target) +// target_dir = File.dirname(target_file) +// Google::LOGGER.debug "Copying #{source} => #{target}" +// FileUtils.mkpath target_dir +// +// # If we've modified a file since starting an MM run, it's a reasonable +// # assumption that it was this run that modified it. +// if File.exist?(target_file) && File.mtime(target_file) > @start_time +// raise "#{target_file} was already modified during this run. 
#{File.mtime(target_file)}" +// end +// +// FileUtils.copy_entry source, target_file +// +// add_hashicorp_copyright_header(output_folder, target) if File.extname(target) == '.go' +// if File.extname(target) == '.go' || File.extname(target) == '.mod' +// replace_import_path(output_folder, target) +// end +// end +// end.map(&:join) +// end +// +// # Compiles files that are shared at the provider level +// def compile_common_files( +// output_folder, +// products, +// common_compile_file, +// override_path = nil +// ) +// return unless File.exist?(common_compile_file) +// +// generate_resources_for_version(products, @target_version_name) +// +// files = YAML.safe_load(compile(common_compile_file)) +// return unless files +// +// file_template = ProviderFileTemplate.new( +// output_folder, +// @target_version_name, +// build_env, +// products, +// override_path +// ) +// compile_file_list(output_folder, files, file_template) +// end +// +// def compile_file_list(output_folder, files, file_template, pwd = Dir.pwd) +// FileUtils.mkpath output_folder +// Dir.chdir output_folder +// files.map do |target, source| +// Thread.new do +// Google::LOGGER.debug "Compiling #{source} => #{target}" +// file_template.generate(pwd, source, target, self) +// +// add_hashicorp_copyright_header(output_folder, target) +// replace_import_path(output_folder, target) +// end +// end.map(&:join) +// Dir.chdir pwd +// end +// +// def add_hashicorp_copyright_header(output_folder, target) +// unless expected_output_folder?(output_folder) +// Google::LOGGER.info "Unexpected output folder (#{output_folder}) detected " \ +// 'when deciding to add HashiCorp copyright headers. 
' \ +// 'Watch out for unexpected changes to copied files' +// end +// # only add copyright headers when generating TPG and TPGB +// return unless output_folder.end_with?('terraform-provider-google') || +// output_folder.end_with?('terraform-provider-google-beta') +// +// # Prevent adding copyright header to files with paths or names matching the strings below +// # NOTE: these entries need to match the content of the .copywrite.hcl file originally +// # created in https://github.com/GoogleCloudPlatform/magic-modules/pull/7336 +// # The test-fixtures folder is not included here as it's copied as a whole, +// # not file by file (see common~copy.yaml) +// ignored_folders = [ +// '.release/', +// '.changelog/', +// 'examples/', +// 'scripts/', +// 'META.d/' +// ] +// ignored_files = [ +// 'go.mod', +// '.goreleaser.yml', +// '.golangci.yml', +// 'terraform-registry-manifest.json' +// ] +// should_add_header = true +// ignored_folders.each do |folder| +// # folder will be path leading to file +// next unless target.start_with? folder +// +// Google::LOGGER.debug 'Not adding HashiCorp copyright headers in ' \ +// "ignored folder #{folder} : #{target}" +// should_add_header = false +// end +// return unless should_add_header +// +// ignored_files.each do |file| +// # file will be the filename and extension, with no preceding path +// next unless target.end_with? file +// +// Google::LOGGER.debug 'Not adding HashiCorp copyright headers to ' \ +// "ignored file #{file} : #{target}" +// should_add_header = false +// end +// return unless should_add_header +// +// Google::LOGGER.debug "Adding HashiCorp copyright header to : #{target}" +// data = File.read("#{output_folder}/#{target}") +// +// copyright_header = ['Copyright (c) HashiCorp, Inc.', 'SPDX-License-Identifier: MPL-2.0'] +// lang = language_from_filename(target) +// +// # Some file types we don't want to add headers to +// # e.g. 
.sh where headers are functional +// # Also, this guards against new filetypes being added and triggering build errors +// return unless lang != :unsupported +// +// # File is not ignored and is appropriate file type to add header to +// header = comment_block(copyright_header, lang) +// File.write("#{output_folder}/#{target}", header) +// +// File.write("#{output_folder}/#{target}", data, mode: 'a') # append mode +// end +// +// def expected_output_folder?(output_folder) +// expected_folders = %w[ +// terraform-provider-google +// terraform-provider-google-beta +// terraform-next +// terraform-google-conversion +// tfplan2cai +// ] +// folder_name = output_folder.split('/')[-1] # Possible issue with Windows OS +// is_expected = false +// expected_folders.each do |folder| +// next unless folder_name == folder +// +// is_expected = true +// break +// end +// is_expected +// end +// +// def replace_import_path(output_folder, target) +// data = File.read("#{output_folder}/#{target}") +// +// if data.include? "#{TERRAFORM_PROVIDER_BETA}/#{RESOURCE_DIRECTORY_BETA}" +// raise 'Importing a package from module ' \ +// "#{TERRAFORM_PROVIDER_BETA}/#{RESOURCE_DIRECTORY_BETA} " \ +// "is not allowed in file #{target.split('/').last}. " \ +// 'Please import a package from module ' \ +// "#{TERRAFORM_PROVIDER_GA}/#{RESOURCE_DIRECTORY_GA}." 
+// end +// +// return if @target_version_name == 'ga' +// +// # Replace the import paths in utility files +// case @target_version_name +// when 'beta' +// tpg = TERRAFORM_PROVIDER_BETA +// dir = RESOURCE_DIRECTORY_BETA +// else +// tpg = TERRAFORM_PROVIDER_PRIVATE +// dir = RESOURCE_DIRECTORY_PRIVATE +// end +// +// data = data.gsub( +// "#{TERRAFORM_PROVIDER_GA}/#{RESOURCE_DIRECTORY_GA}", +// "#{tpg}/#{dir}" +// ) +// data = data.gsub( +// "#{TERRAFORM_PROVIDER_GA}/version", +// "#{tpg}/version" +// ) +// +// data = data.gsub( +// "module #{TERRAFORM_PROVIDER_GA}", +// "module #{tpg}" +// ) +// File.write("#{output_folder}/#{target}", data) +// end +// +// def import_path +// case @target_version_name +// when 'ga' +// "#{TERRAFORM_PROVIDER_GA}/#{RESOURCE_DIRECTORY_GA}" +// when 'beta' +// "#{TERRAFORM_PROVIDER_BETA}/#{RESOURCE_DIRECTORY_BETA}" +// else +// "#{TERRAFORM_PROVIDER_PRIVATE}/#{RESOURCE_DIRECTORY_PRIVATE}" +// end +// end +// +// # Gets the list of services dependent on the version ga, beta, and private +// # If some resources of a service are in GA, +// # then this service is in GA. Otherwise, the service is in BETA +// def get_mmv1_services_in_version(products, version) +// services = [] +// products.map do |product| +// product_definition = product[:definitions] +// if version == 'ga' +// some_resource_in_ga = false +// product_definition.objects.each do |object| +// break if some_resource_in_ga +// +// if !object.exclude && +// !object.not_in_version?(product_definition.version_obj_or_closest(version)) +// some_resource_in_ga = true +// end +// end +// +// services << product[:definitions].name.downcase if some_resource_in_ga +// else +// services << product[:definitions].name.downcase +// end +// end +// services +// end +// +// def generate_objects(output_folder, types, generate_code, generate_docs) +// (@api.objects || []).each do |object| +// if !types.empty? 
&& !types.include?(object.name) +// Google::LOGGER.info "Excluding #{object.name} per user request" +// elsif types.empty? && object.exclude +// Google::LOGGER.info "Excluding #{object.name} per API catalog" +// elsif types.empty? && object.not_in_version?(@version) +// Google::LOGGER.info "Excluding #{object.name} per API version" +// else +// Google::LOGGER.info "Generating #{object.name}" +// # exclude_if_not_in_version must be called in order to filter out +// # beta properties that are nested within GA resources +// object.exclude_if_not_in_version!(@version) +// +// # Make object immutable. +// object.freeze +// object.all_user_properties.each(&:freeze) +// +// generate_object object, output_folder, @target_version_name, generate_code, generate_docs +// end +// # Uncomment for go YAML +// # generate_object_modified object, output_folder, @target_version_name +// end +// end +// +// def generate_object(object, output_folder, version_name, generate_code, generate_docs) +// pwd = Dir.pwd +// data = build_object_data(pwd, object, output_folder, version_name) +// unless object.exclude_resource +// FileUtils.mkpath output_folder +// Dir.chdir output_folder +// Google::LOGGER.debug "Generating #{object.name} resource" +// generate_resource(pwd, data.clone, generate_code, generate_docs) +// if generate_code +// Google::LOGGER.debug "Generating #{object.name} tests" +// generate_resource_tests(pwd, data.clone) +// generate_resource_sweepers(pwd, data.clone) +// end +// Dir.chdir pwd +// end +// # if iam_policy is not defined or excluded, don't generate it +// return if object.iam_policy.nil? 
|| object.iam_policy.exclude +// +// FileUtils.mkpath output_folder +// Dir.chdir output_folder +// Google::LOGGER.debug "Generating #{object.name} IAM policy" +// generate_iam_policy(pwd, data.clone, generate_code, generate_docs) +// Dir.chdir pwd +// end +// +// def generate_object_modified(object, output_folder, version_name) +// pwd = Dir.pwd +// data = build_object_data(pwd, object, output_folder, version_name) +// FileUtils.mkpath output_folder +// Dir.chdir output_folder +// Google::LOGGER.debug "Generating #{object.name} rewrite yaml" +// generate_newyaml(pwd, data.clone) +// Dir.chdir pwd +// end +// +// def generate_newyaml(pwd, data) +// # @api.api_name is the service folder name +// product_name = @api.api_name +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// FileUtils.mkpath target_folder +// data.generate(pwd, +// '/templates/terraform/yaml_conversion.erb', +// "#{target_folder}/go_#{data.object.name}.yaml", +// self) +// return if File.exist?("#{target_folder}/go_product.yaml") +// +// data.generate(pwd, +// '/templates/terraform/product_yaml_conversion.erb', +// "#{target_folder}/go_product.yaml", +// self) +// end +// +// def build_env +// { +// goformat_enabled: @go_format_enabled, +// start_time: @start_time +// } +// end +// +// # used to determine and separate objects that have update methods +// # that target individual fields +// def field_specific_update_methods(properties) +// properties_by_custom_update(properties).length.positive? +// end +// +// # Filter the properties to keep only the ones requiring custom update +// # method and group them by update url & verb. +// def properties_by_custom_update(properties) +// update_props = properties.reject do |p| +// p.update_url.nil? || p.update_verb.nil? 
|| p.update_verb == :NOOP || +// p.is_a?(Api::Type::KeyValueTerraformLabels) || +// p.is_a?(Api::Type::KeyValueLabels) # effective_labels is used for update +// end +// +// update_props.group_by do |p| +// { +// update_url: p.update_url, +// update_verb: p.update_verb, +// update_id: p.update_id, +// fingerprint_name: p.fingerprint_name +// } +// end +// end +// +// # Filter the properties to keep only the ones don't have custom update +// # method and group them by update url & verb. +// def properties_without_custom_update(properties) +// properties.select do |p| +// p.update_url.nil? || p.update_verb.nil? || p.update_verb == :NOOP +// end +// end +// +// # Takes a update_url and returns the list of custom updatable properties +// # that can be updated at that URL. This allows flattened objects +// # to determine which parent property in the API should be updated with +// # the contents of the flattened object +// def custom_update_properties_by_key(properties, key) +// properties_by_custom_update(properties).select do |k, _| +// k[:update_url] == key[:update_url] && +// k[:update_id] == key[:update_id] && +// k[:fingerprint_name] == key[:fingerprint_name] +// end.first.last +// # .first is to grab the element from the select which returns a list +// # .last is because properties_by_custom_update returns a list of +// # [{update_url}, [properties,...]] and we only need the 2nd part +// end +// +// def update_url(resource, url_part) +// [resource.__product.base_url, update_uri(resource, url_part)].flatten.join +// end +// +// def update_uri(resource, url_part) +// return resource.self_link_uri if url_part.nil? +// +// url_part +// end +// +// def generating_hashicorp_repo? +// # The default Provider is used to generate TPG and TPGB in HashiCorp-owned repos. +// # The compiler deviates from the default behaviour with a -f flag to produce +// # non-HashiCorp downstreams. 
+// true +// end +// +// # ProductFileTemplate with Terraform specific fields +// class TerraformProductFileTemplate < Provider::ProductFileTemplate +// # The async object used for making operations. +// # We assume that all resources share the same async properties. +// attr_accessor :async +// +// # When generating OiCS examples, we attach the example we're +// # generating to the data object. +// attr_accessor :example +// +// attr_accessor :resource_name +// end +// +// # Sorts properties in the order they should appear in the TF schema: +// # Required, Optional, Computed +// def order_properties(properties) +// properties.select(&:required).sort_by(&:name) + +// properties.reject(&:required).reject(&:output).sort_by(&:name) + +// properties.select(&:output).sort_by(&:name) +// end +// +// def tf_type(property) +// tf_types[property.class] +// end +// +// # "Namespace" - prefix with product and resource - a property with +// # information from the "object" variable +// def namespace_property_from_object(property, object) +// name = property.name.camelize +// until property.parent.nil? +// property = property.parent +// name = property.name.camelize + name +// end +// +// "#{property.__resource.__product.api_name.camelize(:lower)}#{object.name}#{name}" +// end +// +// # Converts between the Magic Modules type of an object and its type in the +// # TF schema +// def tf_types +// { +// Api::Type::Boolean => 'schema.TypeBool', +// Api::Type::Double => 'schema.TypeFloat', +// Api::Type::Integer => 'schema.TypeInt', +// Api::Type::String => 'schema.TypeString', +// # Anonymous string property used in array of strings. 
+// 'Api::Type::String' => 'schema.TypeString', +// Api::Type::Time => 'schema.TypeString', +// Api::Type::Enum => 'schema.TypeString', +// Api::Type::ResourceRef => 'schema.TypeString', +// Api::Type::NestedObject => 'schema.TypeList', +// Api::Type::Array => 'schema.TypeList', +// Api::Type::KeyValuePairs => 'schema.TypeMap', +// Api::Type::KeyValueLabels => 'schema.TypeMap', +// Api::Type::KeyValueTerraformLabels => 'schema.TypeMap', +// Api::Type::KeyValueEffectiveLabels => 'schema.TypeMap', +// Api::Type::KeyValueAnnotations => 'schema.TypeMap', +// Api::Type::Map => 'schema.TypeSet', +// Api::Type::Fingerprint => 'schema.TypeString' +// } +// end +// +// def updatable?(resource, properties) +// !resource.immutable || !properties.reject { |p| p.update_url.nil? }.empty? +// end +// +// def force_new?(property, resource) +// ( +// (!property.output || property.is_a?(Api::Type::KeyValueEffectiveLabels)) && +// (property.immutable || +// (resource.immutable && property.update_url.nil? && property.immutable.nil? && +// (property.parent.nil? || +// (force_new?(property.parent, resource) && +// !(property.parent.flatten_object && property.is_a?(Api::Type::KeyValueLabels)) +// ) +// ) +// ) +// ) +// ) || +// (property.is_a?(Api::Type::KeyValueTerraformLabels) && +// !updatable?(resource, resource.all_user_properties) && !resource.root_labels? +// ) +// end +// +// # Returns tuples of (fieldName, list of update masks) for +// # top-level updatable fields. Schema path refers to a given Terraform +// # field name (e.g. d.GetChange('fieldName)') +// def get_property_update_masks_groups(properties, mask_prefix: '') +// mask_groups = [] +// properties.each do |prop| +// if prop.flatten_object +// mask_groups += get_property_update_masks_groups( +// prop.properties, mask_prefix: "#{prop.api_name}." 
+// ) +// elsif prop.update_mask_fields +// mask_groups << [prop.name.underscore, prop.update_mask_fields] +// else +// mask_groups << [prop.name.underscore, [mask_prefix + prop.api_name]] +// end +// end +// mask_groups +// end +// +// # Returns an updated path for a given Terraform field path (e.g. +// # 'a_field', 'parent_field.0.child_name'). Returns nil if the property +// # is not included in the resource's properties and removes keys that have +// # been flattened +// # FYI: Fields that have been renamed should use the new name, however, flattened +// # fields still need to be included, ie: +// # flattenedField > newParent > renameMe should be passed to this function as +// # flattened_field.0.new_parent.0.im_renamed +// # TODO(emilymye): Change format of input for +// # exactly_one_of/at_least_one_of/etc to use camelcase, MM properties and +// # convert to snake in this method +// def get_property_schema_path(schema_path, resource) +// nested_props = resource.properties +// prop = nil +// path_tkns = schema_path.split('.0.').map do |pname| +// camel_pname = pname.camelize(:lower) +// prop = nested_props.find { |p| p.name == camel_pname } +// # if we couldn't find it, see if it was renamed at the top level +// prop = nested_props.find { |p| p.name == schema_path } if prop.nil? +// return nil if prop.nil? +// +// nested_props = prop.nested_properties || [] +// prop.flatten_object ? nil : pname.underscore +// end +// if path_tkns.empty? || path_tkns[-1].nil? +// nil +// else +// path_tkns.compact.join('.0.') +// end +// end +// +// # Transforms a format string with field markers to a regex string with +// # capture groups. +// # +// # For instance, +// # projects/{{project}}/global/networks/{{name}} +// # is transformed to +// # projects/(?P[^/]+)/global/networks/(?P[^/]+) +// # +// # Values marked with % are URL-encoded, and will match any number of /'s. +// # +// # Note: ?P indicates a Python-compatible named capture group. 
Named groups +// # aren't common in JS-based regex flavours, but are in Perl-based ones +// def format2regex(format) +// format +// .gsub(/\{\{%([[:word:]]+)\}\}/, '(?P<\1>.+)') +// .gsub(/\{\{([[:word:]]+)\}\}/, '(?P<\1>[^/]+)') +// end +// +// # Capitalize the first letter of a property name. +// # E.g. "creationTimestamp" becomes "CreationTimestamp". +// def titlelize_property(property) +// property.name.camelize(:upper) +// end +// +// # Generates the list of resources, and gets the count of resources and iam resources +// # dependent on the version ga, beta or private. +// # The resource object has the format +// # { +// # terraform_name: +// # resource_name: +// # iam_class_name: +// # } +// # The variable resources_for_version is used to generate resources in file +// # mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb +// def generate_resources_for_version(products, version) +// products.each do |product| +// product_definition = product[:definitions] +// service = product_definition.name.downcase +// product_definition.objects.each do |object| +// if object.exclude || +// object.not_in_version?(product_definition.version_obj_or_closest(version)) +// next +// end +// +// @resource_count += 1 unless object&.exclude_resource +// +// tf_product = (object.__product.legacy_name || product_definition.name).underscore +// terraform_name = object.legacy_name || "google_#{tf_product}_#{object.name.underscore}" +// +// unless object&.exclude_resource +// resource_name = "#{service}.Resource#{product_definition.name}#{object.name}" +// end +// +// iam_policy = object&.iam_policy +// +// @iam_resource_count += 3 unless iam_policy.nil? || iam_policy.exclude +// +// unless iam_policy.nil? 
|| iam_policy.exclude || +// (iam_policy.min_version && iam_policy.min_version < version) +// iam_class_name = "#{service}.#{product_definition.name}#{object.name}" +// end +// +// @resources_for_version << { terraform_name:, resource_name:, iam_class_name: } +// end +// end +// +// @resources_for_version = @resources_for_version.compact +// end +// +// # TODO(nelsonjr): Review all object interfaces and move to private methods +// # that should not be exposed outside the object hierarchy. +// private +// +// def provider_name +// self.class.name.split('::').last.downcase +// end +// +// # Adapted from the method used in templating +// # See: mmv1/compile/core.rb +// def comment_block(text, lang) +// case lang +// when :ruby, :python, :yaml, :git, :gemfile +// header = text.map { |t| t&.empty? ? '#' : "# #{t}" } +// when :go +// header = text.map { |t| t&.empty? ? '//' : "// #{t}" } +// else +// raise "Unknown language for comment: #{lang}" +// end +// +// header_string = header.join("\n") +// "#{header_string}\n" # add trailing newline to returned value +// end +// +// def language_from_filename(filename) +// extension = filename.split('.')[-1] +// case extension +// when 'go' +// :go +// when 'rb' +// :ruby +// when 'yaml', 'yml' +// :yaml +// else +// :unsupported +// end +// end +// +// # Finds the folder name for a given version of the terraform provider +// def folder_name(version) +// version == 'ga' ? 'google' : "google-#{version}" +// end +// +// # This function uses the resource.erb template to create one file +// # per resource. The resource.erb template forms the basis of a single +// # GCP Resource on Terraform. 
+// def generate_resource(pwd, data, generate_code, generate_docs) +// if generate_code +// # @api.api_name is the service folder name +// product_name = @api.api_name +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// FileUtils.mkpath target_folder +// data.generate(pwd, +// '/templates/terraform/resource.erb', +// "#{target_folder}/resource_#{full_resource_name(data)}.go", +// self) +// end +// +// return unless generate_docs +// +// generate_documentation(pwd, data) +// end +// +// def generate_documentation(pwd, data) +// target_folder = data.output_folder +// target_folder = File.join(target_folder, 'website', 'docs', 'r') +// FileUtils.mkpath target_folder +// filepath = File.join(target_folder, "#{full_resource_name(data)}.html.markdown") +// data.generate(pwd, 'templates/terraform/resource.html.markdown.erb', filepath, self) +// end +// +// def generate_resource_tests(pwd, data) +// return if data.object.examples +// .reject(&:skip_test) +// .reject do |e| +// @api.version_obj_or_closest(data.version) \ +// < @api.version_obj_or_closest(e.min_version) +// end +// .empty? 
+// +// product_name = @api.api_name +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// FileUtils.mkpath folder_name(data.version) +// data.generate( +// pwd, +// 'templates/terraform/examples/base_configs/test_file.go.erb', +// "#{target_folder}/resource_#{full_resource_name(data)}_generated_test.go", +// self +// ) +// end +// +// def generate_resource_sweepers(pwd, data) +// return if data.object.skip_sweeper || +// data.object.custom_code.custom_delete || +// data.object.custom_code.pre_delete || +// data.object.custom_code.post_delete || +// data.object.skip_delete +// +// product_name = @api.api_name +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// file_name = +// "#{target_folder}/resource_#{full_resource_name(data)}_sweeper.go" +// FileUtils.mkpath folder_name(data.version) +// data.generate(pwd, +// 'templates/terraform/sweeper_file.go.erb', +// file_name, +// self) +// end +// +// def generate_operation(pwd, output_folder, _types) +// return if @api.objects.select(&:autogen_async).empty? +// +// product_name = @api.api_name +// product_name_underscore = @api.name.underscore +// data = build_object_data(pwd, @api.objects.first, output_folder, @target_version_name) +// +// data.object = @api.objects.select(&:autogen_async).first +// +// data.async = data.object.async +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// FileUtils.mkpath target_folder +// data.generate(pwd, +// 'templates/terraform/operation.go.erb', +// "#{target_folder}/#{product_name_underscore}_operation.go", +// self) +// end +// +// # Generate the IAM policy for this object. 
This is used to query and test +// # IAM policies separately from the resource itself +// def generate_iam_policy(pwd, data, generate_code, generate_docs) +// if generate_code \ +// && (!data.object.iam_policy.min_version \ +// || data.object.iam_policy.min_version >= data.version) +// product_name = @api.api_name +// target_folder = File.join(folder_name(data.version), 'services', product_name) +// FileUtils.mkpath target_folder +// data.generate(pwd, +// 'templates/terraform/iam_policy.go.erb', +// "#{target_folder}/iam_#{full_resource_name(data)}.go", +// self) +// +// # Only generate test if testable examples exist. +// unless data.object.examples.reject(&:skip_test).empty? +// data.generate( +// pwd, +// 'templates/terraform/examples/base_configs/iam_test_file.go.erb', +// "#{target_folder}/iam_#{full_resource_name(data)}_generated_test.go", +// self +// ) +// end +// end +// +// return unless generate_docs +// +// generate_iam_documentation(pwd, data) +// end +// +// def generate_iam_documentation(pwd, data) +// target_folder = data.output_folder +// resource_doc_folder = File.join(target_folder, 'website', 'docs', 'r') +// datasource_doc_folder = File.join(target_folder, 'website', 'docs', 'd') +// FileUtils.mkpath resource_doc_folder +// filepath = +// File.join(resource_doc_folder, "#{full_resource_name(data)}_iam.html.markdown") +// +// data.generate(pwd, 'templates/terraform/resource_iam.html.markdown.erb', filepath, self) +// FileUtils.mkpath datasource_doc_folder +// filepath = +// File.join(datasource_doc_folder, "#{full_resource_name(data)}_iam_policy.html.markdown") +// +// data.generate(pwd, 'templates/terraform/datasource_iam.html.markdown.erb', filepath, self) +// end +// +// def build_object_data(_pwd, object, output_folder, version) +// TerraformProductFileTemplate.file_for_resource( +// output_folder, +// object, +// version, +// build_env +// ) +// end +// +// def extract_identifiers(url) +// url.scan(/\{\{%?(\w+)\}\}/).flatten +// end +// 
+// # Returns the id format of an object, or self_link_uri if none is explicitly defined +// # We prefer the long name of a resource as the id so that users can reference +// # resources in a standard way, and most APIs accept short name, long name or self_link +// def id_format(object) +// object.id_format || object.self_link_uri +// end +// +// def full_resource_name(data) +// if data.object.legacy_name +// data.object.legacy_name.sub(/^google_/, '') +// else +// name = data.object.filename_override || data.object.name.underscore +// product_name = data.product.legacy_name || data.product.name.underscore +// "#{product_name}_#{name}" +// end +// end +// +// # Returns the extension for DCL packages for the given version. This is needed +// # as the DCL uses "alpha" for preview resources, while we use "private" +// def dcl_version(version) +// return '' if version == 'ga' +// return '/beta' if version == 'beta' +// return '/alpha' if version == 'private' +// end +// end +//end +// diff --git a/mmv1/provider/terraform.rb b/mmv1/provider/terraform.rb index 3edea972c351..37fd2de9f525 100644 --- a/mmv1/provider/terraform.rb +++ b/mmv1/provider/terraform.rb @@ -377,6 +377,8 @@ def generate_objects(output_folder, types, generate_code, generate_docs) generate_object object, output_folder, @target_version_name, generate_code, generate_docs end + # Uncomment for go YAML + # generate_object_modified object, output_folder, @target_version_name end end @@ -395,7 +397,6 @@ def generate_object(object, output_folder, version_name, generate_code, generate end Dir.chdir pwd end - # if iam_policy is not defined or excluded, don't generate it return if object.iam_policy.nil? 
|| object.iam_policy.exclude @@ -406,6 +407,33 @@ def generate_object(object, output_folder, version_name, generate_code, generate Dir.chdir pwd end + def generate_object_modified(object, output_folder, version_name) + pwd = Dir.pwd + data = build_object_data(pwd, object, output_folder, version_name) + FileUtils.mkpath output_folder + Dir.chdir output_folder + Google::LOGGER.debug "Generating #{object.name} rewrite yaml" + generate_newyaml(pwd, data.clone) + Dir.chdir pwd + end + + def generate_newyaml(pwd, data) + # @api.api_name is the service folder name + product_name = @api.api_name + target_folder = File.join(folder_name(data.version), 'services', product_name) + FileUtils.mkpath target_folder + data.generate(pwd, + '/templates/terraform/yaml_conversion.erb', + "#{target_folder}/go_#{data.object.name}.yaml", + self) + return if File.exist?("#{target_folder}/go_product.yaml") + + data.generate(pwd, + '/templates/terraform/product_yaml_conversion.erb', + "#{target_folder}/go_product.yaml", + self) + end + def build_env { goformat_enabled: @go_format_enabled, diff --git a/mmv1/provider/terraform/custom_code.go b/mmv1/provider/terraform/custom_code.go new file mode 100644 index 000000000000..1213cf0b6572 --- /dev/null +++ b/mmv1/provider/terraform/custom_code.go @@ -0,0 +1,206 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package terraform + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'uri' +// require 'api/object' +// require 'compile/core' +// require 'google/golang_utils' + +// Inserts custom code into terraform resources. +type CustomCode struct { + google.YamlValidator + + // Collection of fields allowed in the CustomCode section for + // Terraform. + + // All custom code attributes are string-typed. The string should + // be the name of a template file which will be compiled in the + // specified / described place. + // + // ====================== + // schema.Resource stuff + // ====================== + // Extra Schema Entries go below all other schema entries in the + // resource's Resource.Schema map. They should be formatted as + // entries in the map, e.g. `"foo": &schema.Schema{ ... },`. + + // attr_reader : + ExtraSchemaEntry string + + // ==================== + // Encoders & Decoders + // ==================== + // The encoders are functions which take the `obj` map after it + // has been assembled in either "Create" or "Update" and mutate it + // before it is sent to the server. There are lots of reasons you + // might want to use these - any differences between local schema + // and remote schema will be placed here. + // Because the call signature of this function cannot be changed, + // the template will place the function header and closing } for + // you, and your custom code template should *not* include them. + + // attr_reader : + Encoder string + + // The update encoder is the encoder used in Update - if one is + // not provided, the regular encoder is used. If neither is + // provided, of course, neither is used. Similarly, the custom + // code should *not* include the function header or closing }. + // Update encoders are only used if object.input is false, + // because when object.input is true, only individual fields + // can be updated - in that case, use a custom expander. 
+ + // attr_reader : + UpdateEncoder string + + // The decoder is the opposite of the encoder - it's called + // after the Read succeeds, rather than before Create / Update + // are called. Like with encoders, the decoder should not + // include the function header or closing }. + + // attr_reader : + Decoder string + + // ===================== + // Simple customizations + // ===================== + // Constants go above everything else in the file, and include + // things like methods that will be referred to by name elsewhere + // (e.g. "fooBarDiffSuppress") and regexes that are necessarily + // exported (e.g. "fooBarValidationRegex"). + + // attr_reader : + Constants string + + // This code is run before the Create call happens. It's placed + // in the Create function, just before the Create call is made. + + // attr_reader : + PreCreate string + + // This code is run after the Create call succeeds. It's placed + // in the Create function directly without modification. + + // attr_reader : + PostCreate string + + // This code is run after the Create call fails before the error is + // returned. It's placed in the Create function directly without + // modification. + + // attr_reader : + PostCreateFailure string + + // This code replaces the entire contents of the Create call. It + // should be used for resources that don't have normal creation + // semantics that cannot be supported well by other MM features. + + // attr_reader : + CustomCreate string + + // This code is run before the Read call happens. It's placed + // in the Read function. + + // attr_reader : + PreRead string + + // This code is run before the Update call happens. It's placed + // in the Update function, just after the encoder call, before + // the Update call. Just like the encoder, it is only used if + // object.input is false. + + // attr_reader : + PreUpdate string + + // This code is run after the Update call happens. 
It's placed + // in the Update function, just after the call succeeds. + // Just like the encoder, it is only used if object.input is + // false. + + // attr_reader : + PostUpdate string + + // This code replaces the entire contents of the Update call. It + // should be used for resources that don't have normal update + // semantics that cannot be supported well by other MM features. + + // attr_reader : + CustomUpdate string + + // This code is run just before the Delete call happens. It's + // useful to prepare an object for deletion, e.g. by detaching + // a disk before deleting it. + + // attr_reader : + PreDelete string + + // This code is run just after the Delete call happens. + + // attr_reader : + PostDelete string + + // This code replaces the entire delete method. Since the delete + // method's function header can't be changed, the template + // inserts that for you - do not include it in your custom code. + + // attr_reader : + CustomDelete string + + // This code replaces the entire import method. Since the import + // method's function header can't be changed, the template + // inserts that for you - do not include it in your custom code. + + // attr_reader : + CustomImport string + + // This code is run just after the import method succeeds - it + // is useful for parsing attributes that are necessary for + // the Read() method to succeed. + + // attr_reader : + PostImport string + + // This code is run in the generated test file to check that the + // resource was successfully deleted. 
Use this if the API responds + // with a success HTTP code for deleted resources + + // attr_reader : + TestCheckDestroy string +} + +// def validate +// super + +// check :extra_schema_entry, type: String +// check :encoder, type: String +// check :update_encoder, type: String +// check :decoder, type: String +// check :constants, type: String +// check :pre_create, type: String +// check :post_create, type: String +// check :custom_create, type: String +// check :pre_read, type: String +// check :pre_update, type: String +// check :post_update, type: String +// check :custom_update, type: String +// check :pre_delete, type: String +// check :custom_import, type: String +// check :post_import, type: String +// check :test_check_destroy, type: String +// end diff --git a/mmv1/provider/terraform/docs.go b/mmv1/provider/terraform/docs.go new file mode 100644 index 000000000000..408cb8d9fac0 --- /dev/null +++ b/mmv1/provider/terraform/docs.go @@ -0,0 +1,61 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package terraform + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'uri' +// require 'api/object' +// require 'compile/core' +// require 'google/golang_utils' + +// Inserts custom strings into terraform resource docs. +type Docs struct { + google.YamlValidator + + // All these values should be strings, which will be inserted + // directly into the terraform resource documentation. 
The + // strings should _not_ be the names of template files + // (This should be reconsidered if we find ourselves repeating + // any string more than once), but rather the actual text + // (including markdown) which needs to be injected into the + // template. + // The text will be injected at the bottom of the specified + // section. + // attr_reader : + Warning string + + // attr_reader : + Note string + + // attr_reader : + RequiredProperties string + + // attr_reader : + OptionalProperties string + + // attr_reader : + Attributes string +} + +// def validate +// super +// check :warning, type: String +// check :note, type: String +// check :required_properties, type: String +// check :optional_properties, type: String +// check :attributes, type: String +// end diff --git a/mmv1/provider/terraform/examples.go b/mmv1/provider/terraform/examples.go new file mode 100644 index 000000000000..ff4ba716a24f --- /dev/null +++ b/mmv1/provider/terraform/examples.go @@ -0,0 +1,339 @@ +// Copyright 2024 Google Inc. +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package terraform + +import ( + "github.com/GoogleCloudPlatform/magic-modules/mmv1/google" +) + +// require 'uri' +// require 'api/object' +// require 'compile/core' +// require 'google/golang_utils' + +// Generates configs to be shown as examples in docs and outputted as tests +// from a shared template +type Examples struct { + google.YamlValidator + + // include Compile::Core + // include Google::GolangUtils + + // The name of the example in lower snake_case. + // Generally takes the form of the resource name followed by some detail + // about the specific test. For example, "address_with_subnetwork". + Name string + + // The id of the "primary" resource in an example. Used in import tests. + // This is the value that will appear in the Terraform config url. For + // example: + // resource "google_compute_address" {{primary_resource_id}} { + // ... + // } + PrimaryResourceId string + + // Optional resource type of the "primary" resource. Used in import tests. + // If set, this will override the default resource type implied from the + // object parent + PrimaryResourceType string + + // vars is a Hash from template variable names to output variable names. + // It will use the provided value as a prefix for generated tests, and + // insert it into the docs verbatim. + Vars map[string]string + + // Some variables need to hold special values during tests, and cannot + // be inferred by Open in Cloud Shell. For instance, org_id + // needs to be the correct value during integration tests, or else + // org tests cannot pass. Other examples include an existing project_id, + // a zone, a service account name, etc. 
+ // + // test_env_vars is a Hash from template variable names to one of the + // following symbols: + // - :PROJECT_NAME + // - :FIRESTORE_PROJECT_NAME + // - :CREDENTIALS + // - :REGION + // - :ORG_ID + // - :ORG_TARGET + // - :BILLING_ACCT + // - :MASTER_BILLING_ACCT + // - :SERVICE_ACCT + // - :CUST_ID + // - :IDENTITY_USER + // This list corresponds to the `get*FromEnv` methods in provider_test.go. + TestEnvVars map[string]string + + // Hash to provide custom override values for generating test config + // If field my-var is set in this hash, it will replace vars[my-var] in + // tests. i.e. if vars["network"] = "my-vpc", without override: + // - doc config will have `network = "my-vpc"` + // - tests config will have `"network = my-vpc%{random_suffix}"` + // with context + // map[string]interface{}{ + // "random_suffix": acctest.RandString() + // } + // + // If test_vars_overrides["network"] = "nameOfVpc()" + // - doc config will have `network = "my-vpc"` + // - tests will replace with `"network = %{network}"` with context + // map[string]interface{}{ + // "network": nameOfVpc + // ... + // } + TestVarsOverrides map[string]string + + // Hash to provide custom override values for generating oics config + // See test_vars_overrides for more details + OicsVarsOverrides map[string]string + + // The version name of the example's version if it's different than the + // resource version, eg. `beta` + // + // This should be the highest version of all the features used in the + // example; if there's a single beta field in an example, the example's + // min_version is beta. This is only needed if an example uses features + // with a different version than the resource; a beta resource's examples + // are all automatically versioned at beta. + // + // When an example has a version of beta, each resource must use the + // `google-beta` provider in the config. If the `google` provider is + // implicitly used, the test will fail. 
+ // + // NOTE: Until Terraform 0.12 is released and is used in the OiCS tests, an + // explicit provider block should be defined. While the tests @ 0.12 will + // use `google-beta` automatically, past Terraform versions required an + // explicit block. + MinVersion string + + // Extra properties to ignore read on during import. + // These properties will likely be custom code. + IgnoreReadExtra []string + + // Whether to skip generating tests for this resource + SkipTest bool + + // Whether to skip generating docs for this example + SkipDocs bool + + // Whether to skip import tests for this example + SkipImportTest bool + + // The name of the primary resource for use in IAM tests. IAM tests need + // a reference to the primary resource to create IAM policies for + PrimaryResourceName string + + // The name of the location/region override for use in IAM tests. IAM + // tests may need this if the location is not inherited on the resource + // for one reason or another + RegionOverride string + + // The path to this example's Terraform config. + // Defaults to `templates/terraform/examples/{{name}}.tf.erb` + ConfigPath string + + // If the example should be skipped during VCR testing. + // This is the case when something about the resource or config causes VCR to fail for example + // a resource with a unique identifier generated within the resource via resource.UniqueId() + // Or a config with two fine grained resources that have a race condition during create + SkipVcr bool + + // Set for false by default. Set to true if you need to pull external provider for your + // testcase. Think before adding as there is latency and adds an external dependency to + // your test so avoid if you can. 
+ PullExternal bool +} + +// func (e *Examples) config_documentation(pwd) { +// docs_defaults = { +// PROJECT_NAME: 'my-project-name', +// FIRESTORE_PROJECT_NAME: 'my-project-name', +// CREDENTIALS: 'my/credentials/filename.json', +// REGION: 'us-west1', +// ORG_ID: '123456789', +// ORG_DOMAIN: 'example.com', +// ORG_TARGET: '123456789', +// BILLING_ACCT: '000000-0000000-0000000-000000', +// MASTER_BILLING_ACCT: '000000-0000000-0000000-000000', +// SERVICE_ACCT: 'my@service-account.com', +// CUST_ID: 'A01b123xz', +// IDENTITY_USER: 'cloud_identity_user', +// PAP_DESCRIPTION: 'description' +// } +// @vars ||= {} +// @test_env_vars ||= {} +// body = lines(compile_file( +// { +// vars:, +// test_env_vars: test_env_vars.to_h { |k, v| [k, docs_defaults[v]] }, +// primary_resource_id: +// }, +// "//{pwd}///{config_path}" +// )) + +// // Remove region tags +// body = body.gsub(/// \[[a-zA-Z_ ]+\]\n/, '') +// body = body.gsub(/\n// \[[a-zA-Z_ ]+\]/, '') +// lines(compile_file( +// { content: body }, +// "//{pwd}/templates/terraform/examples/base_configs/documentation.tf.erb" +// )) +// } + +// func (e *Examples) config_test(pwd) { +// body = config_test_body(pwd) +// lines(compile_file( +// { +// content: body +// }, +// "//{pwd}/templates/terraform/examples/base_configs/test_body.go.erb" +// )) +// } + +// rubocop:disable Style/FormatStringToken +// func (e *Examples) config_test_body(pwd) { +// @vars ||= {} +// @test_env_vars ||= {} +// @test_vars_overrides ||= {} + +// // Construct map for vars to inject into config - will have +// // - "a-example-var-value%{random_suffix}"" +// // - "%{my_var}" for overrides that have custom Golang values +// rand_vars = vars.map do |k, v| +// // Some resources only allow underscores. 
+// testv = if v.include?('-') +// "tf-test-//{v}" +// elsif v.include?('_') +// "tf_test_//{v}" +// else +// // Some vars like descriptions shouldn't have prefix +// v +// end +// // Random suffix is 10 characters and standard name length <= 64 +// testv = "//{testv[0...54]}%{random_suffix}" +// [k, testv] +// end + +// rand_vars = rand_vars.to_h +// overrides = test_vars_overrides.to_h { |k, _| [k, "%{//{k}}"] } +// body = lines(compile_file( +// { +// vars: rand_vars.merge(overrides), +// test_env_vars: test_env_vars.to_h { |k, _| [k, "%{//{k}}"] }, +// primary_resource_id:, +// primary_resource_type: +// }, +// "//{pwd}///{config_path}" +// )) + +// // Remove region tags +// body = body.gsub(/// \[[a-zA-Z_ ]+\]\n/, '') +// body = body.gsub(/\n// \[[a-zA-Z_ ]+\]/, '') +// substitute_test_paths body +// } + +// func (e *Examples) config_oics(pwd) { +// @vars ||= [] +// @oics_vars_overrides ||= {} + +// rand_vars = vars.to_h { |k, str| [k, "//{str}-${local.name_suffix}"] } + +// // Examples with test_env_vars are skipped elsewhere +// body = lines(compile_file( +// { +// vars: rand_vars.merge(oics_vars_overrides), +// primary_resource_id: +// }, +// "//{pwd}///{config_path}" +// )) + +// // Remove region tags +// body = body.gsub(/// \[[a-zA-Z_ ]+\]\n/, '') +// body = body.gsub(/\n// \[[a-zA-Z_ ]+\]/, '') +// substitute_example_paths body +// } + +// func (e *Examples) oics_link() { +// hash = { +// cloudshell_git_repo: 'https://github.com/terraform-google-modules/docs-examples.git', +// cloudshell_working_dir: @name, +// cloudshell_image: 'gcr.io/cloudshell-images/cloudshell:latest', +// open_in_editor: 'main.tf', +// cloudshell_print: './motd', +// cloudshell_tutorial: './tutorial.md' +// } +// URI::HTTPS.build( +// host: 'console.cloud.google.com', +// path: '/cloudshell/open', +// query: URI.encode_www_form(hash) +// ) +// } + +// rubocop:disable Layout/LineLength +// func (e *Examples) substitute_test_paths(config) { +// 
config.gsub!('../static/img/header-logo.png', 'test-fixtures/header-logo.png') +// config.gsub!('path/to/private.key', 'test-fixtures/test.key') +// config.gsub!('path/to/certificate.crt', 'test-fixtures/test.crt') +// config.gsub!('path/to/index.zip', '%{zip_path}') +// config.gsub!('verified-domain.com', 'tf-test-domain%{random_suffix}.gcp.tfacc.hashicorptest.com') +// config.gsub!('path/to/id_rsa.pub', 'test-fixtures/ssh_rsa.pub') +// config +// } + +// func (e *Examples) substitute_example_paths(config) { +// config.gsub!('../static/img/header-logo.png', '../static/header-logo.png') +// config.gsub!('path/to/private.key', '../static/ssl_cert/test.key') +// config.gsub!('path/to/id_rsa.pub', '../static/ssh_rsa.pub') +// config.gsub!('path/to/certificate.crt', '../static/ssl_cert/test.crt') +// config +// end +// // rubocop:enable Layout/LineLength +// // rubocop:enable Style/FormatStringToken +// } + +// func (e *Examples) validate() { +// super +// check :name, type: String, required: true +// check :primary_resource_id, type: String +// check :min_version, type: String +// check :vars, type: Hash +// check :test_env_vars, type: Hash +// check :test_vars_overrides, type: Hash +// check :ignore_read_extra, type: Array, item_type: String, default: [] +// check :primary_resource_name, type: String +// check :skip_test, type: TrueClass +// check :skip_import_test, type: TrueClass +// check :skip_docs, type: TrueClass +// check :config_path, type: String, default: "templates/terraform/examples///{name}.tf.erb" +// check :skip_vcr, type: TrueClass +// check :pull_external, type: :boolean, default: false +// } + +// func (e *Examples) merge(other) { +// result = self.class.new +// instance_variables.each do |v| +// result.instance_variable_set(v, instance_variable_get(v)) +// end + +// other.instance_variables.each do |v| +// if other.instance_variable_get(v).instance_of?(Array) +// result.instance_variable_set(v, deep_merge(result.instance_variable_get(v), +// 
other.instance_variable_get(v))) +// else +// result.instance_variable_set(v, other.instance_variable_get(v)) +// end +// end + +// result +// } diff --git a/mmv1/provider/terraform/sub_template.rb b/mmv1/provider/terraform/sub_template.rb index e74d2ebce971..2dc16f7274f0 100644 --- a/mmv1/provider/terraform/sub_template.rb +++ b/mmv1/provider/terraform/sub_template.rb @@ -15,6 +15,13 @@ module Provider class Terraform # Functions to compile sub-templates. module SubTemplate + def build_newyaml_field(property, object, pwd) + compile_template "#{pwd}/templates/terraform/yaml_conversion_field.erb", + property:, + object:, + pwd: + end + def build_schema_property(property, object, pwd) compile_template "#{pwd}/templates/terraform/schema_property.erb", property:, diff --git a/mmv1/templates/terraform/constants/bigquery_dataset.go.erb b/mmv1/templates/terraform/constants/bigquery_dataset.go.erb index 0e89a4a63500..3a5f4b930334 100644 --- a/mmv1/templates/terraform/constants/bigquery_dataset.go.erb +++ b/mmv1/templates/terraform/constants/bigquery_dataset.go.erb @@ -1,4 +1,4 @@ -const datasetIdRegexp = `[0-9A-Za-z_]+` +const datasetIdRegexp = `^[0-9A-Za-z_]+$` func validateDatasetId(v interface{}, k string) (ws []string, errors []error) { value := v.(string) diff --git a/mmv1/templates/terraform/constants/compute_service_attachment.go.erb b/mmv1/templates/terraform/constants/compute_service_attachment.go.erb new file mode 100644 index 000000000000..ee6a557b2f22 --- /dev/null +++ b/mmv1/templates/terraform/constants/compute_service_attachment.go.erb @@ -0,0 +1,50 @@ +<%# The license inside this block applies to this file. + # Copyright 2020 Google Inc. + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. 
+ # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. +-%> + +// Hash based on key, which is either project_id_or_num or network_url. +func computeServiceAttachmentConsumerAcceptListsHash(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + log.Printf("[DEBUG] hashing %v", m) + + if v, ok := m["project_id_or_num"]; ok { + if v == nil { + v = "" + } + + buf.WriteString(fmt.Sprintf("%v-", v)) + } + + if v, ok := m["network_url"]; ok { + if v == nil { + v = "" + } else { + if networkUrl, err := tpgresource.GetRelativePath(v.(string)); err != nil { + log.Printf("[WARN] Error on retrieving relative path of network url: %s", err) + } else { + v = networkUrl + } + } + + buf.WriteString(fmt.Sprintf("%v-", v)) + } + + log.Printf("[DEBUG] computed hash value of %v from %v", tpgresource.Hashcode(buf.String()), buf.String()) + return tpgresource.Hashcode(buf.String()) +} diff --git a/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb b/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb index f3caa9775cca..7b5fe9d1fffc 100644 --- a/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb +++ b/mmv1/templates/terraform/custom_check_destroy/firestore_field.go.erb @@ -16,7 +16,16 @@ res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ UserAgent: config.UserAgent, }) if err != nil { - return err + e := err.(*googleapi.Error) + if e.Code == 403 && strings.Contains(e.Message, "Cloud Firestore API has not been used in project") { + // The acceptance test has provisioned the resources under test 
in a new project, and the destroy check is seeing the + // effects of the project not existing. This means the service isn't enabled, and that the resource is definitely destroyed. + // We do not return the error in this case - destroy was successful + return nil + } + + // Return err in all other cases + return err } if v := res["indexConfig"]; v != nil { diff --git a/mmv1/templates/terraform/decoders/compute_region_target_https_proxy.go.erb b/mmv1/templates/terraform/decoders/compute_region_target_https_proxy.go.erb new file mode 100644 index 000000000000..0a72fe1c476a --- /dev/null +++ b/mmv1/templates/terraform/decoders/compute_region_target_https_proxy.go.erb @@ -0,0 +1,15 @@ +// Since both sslCertificates and certificateManagerCertificates map to the same API field (sslCertificates), we need to check the types +// of certificates that exist in the array and decide whether to change the field to certificateManagerCertificate or not. +// The decoder logic depends on the fact that the API does not allow mixed type of certificates and it returns +// certificate manager certificates in the format of //certificatemanager.googleapis.com/projects/*/locations/*/certificates/* +if sslCertificates, ok := res["sslCertificates"].([]interface{}); ok && len(sslCertificates) > 0 { + regPat, _ := regexp.Compile("//certificatemanager.googleapis.com/projects/(.*)/locations/(.*)/certificates/(.*)") + + if regPat.MatchString(sslCertificates[0].(string)) { + // It is enough to check only the type of one of the provided certificates because all the certificates should be the same type. 
+ log.Printf("[DEBUG] The field sslCertificates contains certificateManagerCertificates, the field name will be converted to certificateManagerCertificates") + res["certificateManagerCertificates"] = res["sslCertificates"] + delete(res, "sslCertificates") + } +} +return res, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/encoders/compute_region_target_https_proxy.go.erb b/mmv1/templates/terraform/encoders/compute_region_target_https_proxy.go.erb new file mode 100644 index 000000000000..168d4a65c5ef --- /dev/null +++ b/mmv1/templates/terraform/encoders/compute_region_target_https_proxy.go.erb @@ -0,0 +1,10 @@ + +if _, ok := obj["certificateManagerCertificates"]; ok { + // The field certificateManagerCertificates should not be included in the API request, and it should be renamed to `sslCertificates` + // The API does not allow using both certificate manager certificates and sslCertificates. If that changes + // in the future, the encoder logic should change accordingly because this will mean that both fields are no longer mutually exclusive.
+ log.Printf("[DEBUG] converting the field CertificateManagerCertificates to sslCertificates before sending the request") + obj["sslCertificates"] = obj["certificateManagerCertificates"] + delete(obj, "certificateManagerCertificates") +} +return obj, nil \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/app_engine_application_url_dispatch_rules_basic.tf.erb b/mmv1/templates/terraform/examples/app_engine_application_url_dispatch_rules_basic.tf.erb index dad8252eecaa..2bda703ff567 100644 --- a/mmv1/templates/terraform/examples/app_engine_application_url_dispatch_rules_basic.tf.erb +++ b/mmv1/templates/terraform/examples/app_engine_application_url_dispatch_rules_basic.tf.erb @@ -15,7 +15,7 @@ resource "google_app_engine_application_url_dispatch_rules" "<%= ctx[:primary_re resource "google_app_engine_standard_app_version" "admin_v3" { version_id = "v3" service = "admin" - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" diff --git a/mmv1/templates/terraform/examples/app_engine_service_network_settings.tf.erb b/mmv1/templates/terraform/examples/app_engine_service_network_settings.tf.erb index 53f48b2c235c..3bef5a62a6ef 100644 --- a/mmv1/templates/terraform/examples/app_engine_service_network_settings.tf.erb +++ b/mmv1/templates/terraform/examples/app_engine_service_network_settings.tf.erb @@ -14,7 +14,7 @@ resource "google_app_engine_standard_app_version" "internalapp" { service = "internalapp" delete_service_on_destroy = true - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" } diff --git a/mmv1/templates/terraform/examples/app_engine_service_split_traffic.tf.erb b/mmv1/templates/terraform/examples/app_engine_service_split_traffic.tf.erb index 8c85ce8bd13f..8e2ff3ecbece 100644 --- a/mmv1/templates/terraform/examples/app_engine_service_split_traffic.tf.erb +++ b/mmv1/templates/terraform/examples/app_engine_service_split_traffic.tf.erb @@ -14,7 +14,7 @@ resource 
"google_app_engine_standard_app_version" "liveapp_v1" { service = "liveapp" delete_service_on_destroy = true - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" } @@ -33,7 +33,7 @@ resource "google_app_engine_standard_app_version" "liveapp_v2" { service = "liveapp" noop_on_destroy = true - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" } diff --git a/mmv1/templates/terraform/examples/app_engine_standard_app_version.tf.erb b/mmv1/templates/terraform/examples/app_engine_standard_app_version.tf.erb index 64234c1dba0f..75696c694955 100644 --- a/mmv1/templates/terraform/examples/app_engine_standard_app_version.tf.erb +++ b/mmv1/templates/terraform/examples/app_engine_standard_app_version.tf.erb @@ -18,7 +18,7 @@ resource "google_project_iam_member" "storage_viewer" { resource "google_app_engine_standard_app_version" "<%= ctx[:primary_resource_id] %>" { version_id = "v1" service = "myapp" - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" @@ -55,7 +55,7 @@ resource "google_app_engine_standard_app_version" "<%= ctx[:primary_resource_id] resource "google_app_engine_standard_app_version" "myapp_v2" { version_id = "v2" service = "myapp" - runtime = "nodejs10" + runtime = "nodejs20" app_engine_apis = true entrypoint { diff --git a/mmv1/templates/terraform/examples/apphub_application_basic.tf.erb b/mmv1/templates/terraform/examples/apphub_application_basic.tf.erb new file mode 100644 index 000000000000..7b7c50231dca --- /dev/null +++ b/mmv1/templates/terraform/examples/apphub_application_basic.tf.erb @@ -0,0 +1,7 @@ +resource "google_apphub_application" "<%= ctx[:primary_resource_id] %>" { + location = "us-east1" + application_id = "<%= ctx[:vars]['application_id'] %>" + scope { + type = "REGIONAL" + } +} diff --git a/mmv1/templates/terraform/examples/apphub_application_full.tf.erb b/mmv1/templates/terraform/examples/apphub_application_full.tf.erb new file mode 100644 index 
000000000000..42d9de520464 --- /dev/null +++ b/mmv1/templates/terraform/examples/apphub_application_full.tf.erb @@ -0,0 +1,29 @@ +resource "google_apphub_application" "<%= ctx[:primary_resource_id] %>" { + location = "us-east1" + application_id = "<%= ctx[:vars]['application_id'] %>" + display_name = "<%= ctx[:vars]['display_name'] %>" + scope { + type = "REGIONAL" + } + description = "<%= ctx[:vars]['description'] %>" + attributes { + environment { + type = "STAGING" + } + criticality { + type = "MISSION_CRITICAL" + } + business_owners { + display_name = "<%= ctx[:vars]['business_name'] %>" + email = "<%= ctx[:vars]['business_email'] %>" + } + developer_owners { + display_name = "<%= ctx[:vars]['developer_name'] %>" + email = "<%= ctx[:vars]['developer_email'] %>" + } + operator_owners { + display_name = "<%= ctx[:vars]['operator_name'] %>" + email = "<%= ctx[:vars]['operator_email'] %>" + } + } +} diff --git a/mmv1/templates/terraform/examples/big_query_routine_basic.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_basic.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_basic.tf.erb rename to mmv1/templates/terraform/examples/bigquery_routine_basic.tf.erb diff --git a/mmv1/templates/terraform/examples/big_query_routine_json.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_json.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_json.tf.erb rename to mmv1/templates/terraform/examples/bigquery_routine_json.tf.erb diff --git a/mmv1/templates/terraform/examples/big_query_routine_pyspark.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_pyspark.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_pyspark.tf.erb rename to mmv1/templates/terraform/examples/bigquery_routine_pyspark.tf.erb diff --git a/mmv1/templates/terraform/examples/big_query_routine_pyspark_mainfile.tf.erb 
b/mmv1/templates/terraform/examples/bigquery_routine_pyspark_mainfile.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_pyspark_mainfile.tf.erb rename to mmv1/templates/terraform/examples/bigquery_routine_pyspark_mainfile.tf.erb diff --git a/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb new file mode 100644 index 000000000000..9ef5b2a0dd2a --- /dev/null +++ b/mmv1/templates/terraform/examples/bigquery_routine_remote_function.tf.erb @@ -0,0 +1,27 @@ +resource "google_bigquery_dataset" "test" { + dataset_id = "<%= ctx[:vars]['dataset_id'] %>" +} + +resource "google_bigquery_connection" "test" { + connection_id = "<%= ctx[:vars]['connection_id'] %>" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_routine" "<%= ctx[:primary_resource_id] %>" { + dataset_id = google_bigquery_dataset.test.dataset_id + routine_id = "<%= ctx[:vars]['routine_id'] %>" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = "https://us-east1-my_gcf_project.cloudfunctions.net/remote_add" + connection = google_bigquery_connection.test.name + max_batching_rows = "10" + user_defined_context = { + "z": "1.5", + } + } +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/big_query_routine_spark_jar.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_spark_jar.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_spark_jar.tf.erb rename to mmv1/templates/terraform/examples/bigquery_routine_spark_jar.tf.erb diff --git a/mmv1/templates/terraform/examples/big_query_routine_tvf.tf.erb b/mmv1/templates/terraform/examples/bigquery_routine_tvf.tf.erb similarity index 100% rename from mmv1/templates/terraform/examples/big_query_routine_tvf.tf.erb rename to 
mmv1/templates/terraform/examples/bigquery_routine_tvf.tf.erb diff --git a/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb index c5abd47bc47b..a63cbe5392b2 100644 --- a/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb +++ b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_basic.tf.erb @@ -1,6 +1,7 @@ resource "google_certificate_manager_dns_authorization" "<%= ctx[:primary_resource_id] %>" { name = "<%= ctx[:vars]['dns_auth_name'] %>" - description = "The default dnss" + location = "global" + description = "The default dns" domain = "<%= ctx[:vars]['subdomain'] %>.hashicorptest.com" } diff --git a/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_regional.tf.erb b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_regional.tf.erb new file mode 100644 index 000000000000..21df9b21c914 --- /dev/null +++ b/mmv1/templates/terraform/examples/certificate_manager_dns_authorization_regional.tf.erb @@ -0,0 +1,7 @@ +resource "google_certificate_manager_dns_authorization" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['dns_auth_name'] %>" + location = "us-central1" + description = "reginal dns" + type = "PER_PROJECT_RECORD" + domain = "<%= ctx[:vars]['subdomain'] %>.hashicorptest.com" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/certificate_manager_google_managed_regional_certificate_dns_auth.tf.erb b/mmv1/templates/terraform/examples/certificate_manager_google_managed_regional_certificate_dns_auth.tf.erb new file mode 100644 index 000000000000..f49a6cf70d8f --- /dev/null +++ b/mmv1/templates/terraform/examples/certificate_manager_google_managed_regional_certificate_dns_auth.tf.erb @@ -0,0 +1,19 @@ +resource "google_certificate_manager_certificate" "<%= ctx[:primary_resource_id] %>" { + name = "<%= 
ctx[:vars]['cert_name'] %>" + description = "regional managed certs" + location = "us-central1" + managed { + domains = [ + google_certificate_manager_dns_authorization.instance.domain, + ] + dns_authorizations = [ + google_certificate_manager_dns_authorization.instance.id, + ] + } +} +resource "google_certificate_manager_dns_authorization" "instance" { + name = "<%= ctx[:vars]['dns_auth_name'] %>" + location = "us-central1" + description = "The default dnss" + domain = "<%= ctx[:vars]['dns_auth_subdomain'] %>.hashicorptest.com" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/clouddeploy_automation_basic.tf.erb b/mmv1/templates/terraform/examples/clouddeploy_automation_basic.tf.erb index fa7ad4fb230e..41b460f1aefb 100644 --- a/mmv1/templates/terraform/examples/clouddeploy_automation_basic.tf.erb +++ b/mmv1/templates/terraform/examples/clouddeploy_automation_basic.tf.erb @@ -1,5 +1,4 @@ resource "google_clouddeploy_automation" "<%= ctx[:primary_resource_id] %>" { - provider = google-beta name = "<%= ctx[:vars]['automation'] %>" project = google_clouddeploy_delivery_pipeline.pipeline.project location = google_clouddeploy_delivery_pipeline.pipeline.location @@ -19,7 +18,6 @@ resource "google_clouddeploy_automation" "<%= ctx[:primary_resource_id] %>" { } resource "google_clouddeploy_delivery_pipeline" "pipeline" { - provider = google-beta name = "<%= ctx[:vars]['delivery_pipeline'] %>" location = "us-central1" serial_pipeline { diff --git a/mmv1/templates/terraform/examples/clouddeploy_automation_full.tf.erb b/mmv1/templates/terraform/examples/clouddeploy_automation_full.tf.erb index 8cd21d0e10d3..51bf98d1bca1 100644 --- a/mmv1/templates/terraform/examples/clouddeploy_automation_full.tf.erb +++ b/mmv1/templates/terraform/examples/clouddeploy_automation_full.tf.erb @@ -1,5 +1,4 @@ resource "google_clouddeploy_automation" "<%= ctx[:primary_resource_id] %>" { - provider = google-beta name = "<%= ctx[:vars]['automation'] %>" location = 
"us-central1" delivery_pipeline = google_clouddeploy_delivery_pipeline.pipeline.name @@ -40,7 +39,6 @@ resource "google_clouddeploy_automation" "<%= ctx[:primary_resource_id] %>" { } resource "google_clouddeploy_delivery_pipeline" "pipeline" { - provider = google-beta name = "<%= ctx[:vars]['delivery_pipeline'] %>" location = "us-central1" serial_pipeline { diff --git a/mmv1/templates/terraform/examples/clouddeploy_target_basic.tf.erb b/mmv1/templates/terraform/examples/clouddeploy_target_basic.tf.erb new file mode 100644 index 000000000000..5fd51566834c --- /dev/null +++ b/mmv1/templates/terraform/examples/clouddeploy_target_basic.tf.erb @@ -0,0 +1,4 @@ +resource "google_clouddeploy_target" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['target'] %>" + location = "us-central1" + } diff --git a/mmv1/templates/terraform/examples/firebase_app_check_device_check_config_full.tf.erb b/mmv1/templates/terraform/examples/firebase_app_check_device_check_config_full.tf.erb new file mode 100644 index 000000000000..72b6a96fbafb --- /dev/null +++ b/mmv1/templates/terraform/examples/firebase_app_check_device_check_config_full.tf.erb @@ -0,0 +1,34 @@ +resource "google_firebase_apple_app" "default" { + provider = google-beta + + project = "<%= ctx[:test_env_vars]['project_id'] %>" + display_name = "Apple app" + bundle_id = "<%= ctx[:vars]['bundle_id'] %>" + team_id = "<%= ctx[:vars]['team_id'] %>" +} + +# It takes a while for App Check to recognize the new app +# If your app already exists, you don't have to wait 30 seconds. 
+resource "time_sleep" "wait_30s" { + depends_on = [google_firebase_apple_app.default] + create_duration = "30s" +} + +resource "google_firebase_app_check_device_check_config" "default" { + provider = google-beta + + project = "<%= ctx[:test_env_vars]['project_id'] %>" + app_id = google_firebase_apple_app.default.app_id + token_ttl = "<%= ctx[:vars]['token_ttl'] %>" + key_id = "<%= ctx[:vars]['key_id'] %>" + private_key = file("<%= ctx[:vars]['private_key_path'] %>") + + depends_on = [time_sleep.wait_30s] + + lifecycle { + precondition { + condition = google_firebase_apple_app.default.team_id != "" + error_message = "Provide a Team ID on the Apple App to use App Check" + } + } +} diff --git a/mmv1/templates/terraform/examples/firestore_cmek_database.tf.erb b/mmv1/templates/terraform/examples/firestore_cmek_database.tf.erb new file mode 100644 index 000000000000..330b16d32eab --- /dev/null +++ b/mmv1/templates/terraform/examples/firestore_cmek_database.tf.erb @@ -0,0 +1,50 @@ +data "google_project" "project" { + provider = google-beta +} + +resource "google_firestore_database" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + + project = "<%= ctx[:test_env_vars]['project_id'] %>" + name = "<%= ctx[:vars]['database_id']%>" + location_id = "nam5" + type = "FIRESTORE_NATIVE" + concurrency_mode = "OPTIMISTIC" + app_engine_integration_mode = "DISABLED" + point_in_time_recovery_enablement = "POINT_IN_TIME_RECOVERY_ENABLED" + delete_protection_state = "<%= ctx[:vars]['delete_protection_state'] %>" + deletion_policy = "DELETE" + cmek_config { + kms_key_name = google_kms_crypto_key.crypto_key.id + } + + depends_on = [ + google_kms_crypto_key_iam_binding.firestore_cmek_keyuser + ] +} + +resource "google_kms_crypto_key" "crypto_key" { + provider = google-beta + + name = "<%= ctx[:vars]['kms_key_name'] %>" + key_ring = google_kms_key_ring.key_ring.id + purpose = "ENCRYPT_DECRYPT" +} + +resource "google_kms_key_ring" "key_ring" { + provider = google-beta + + name = 
"<%= ctx[:vars]['kms_key_ring_name'] %>" + location = "us" +} + +resource "google_kms_crypto_key_iam_binding" "firestore_cmek_keyuser" { + provider = google-beta + + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + members = [ + "serviceAccount:service-${data.google_project.project.number}@gcp-sa-firestore.iam.gserviceaccount.com", + ] +} diff --git a/mmv1/templates/terraform/examples/firestore_cmek_database_in_datastore_mode.tf.erb b/mmv1/templates/terraform/examples/firestore_cmek_database_in_datastore_mode.tf.erb new file mode 100644 index 000000000000..ac2a33a8ce06 --- /dev/null +++ b/mmv1/templates/terraform/examples/firestore_cmek_database_in_datastore_mode.tf.erb @@ -0,0 +1,50 @@ +data "google_project" "project" { + provider = google-beta +} + +resource "google_firestore_database" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + + project = "<%= ctx[:test_env_vars]['project_id'] %>" + name = "<%= ctx[:vars]['database_id']%>" + location_id = "nam5" + type = "DATASTORE_MODE" + concurrency_mode = "OPTIMISTIC" + app_engine_integration_mode = "DISABLED" + point_in_time_recovery_enablement = "POINT_IN_TIME_RECOVERY_ENABLED" + delete_protection_state = "<%= ctx[:vars]['delete_protection_state'] %>" + deletion_policy = "DELETE" + cmek_config { + kms_key_name = google_kms_crypto_key.crypto_key.id + } + + depends_on = [ + google_kms_crypto_key_iam_binding.firestore_cmek_keyuser + ] +} + +resource "google_kms_crypto_key" "crypto_key" { + provider = google-beta + + name = "<%= ctx[:vars]['kms_key_name'] %>" + key_ring = google_kms_key_ring.key_ring.id + purpose = "ENCRYPT_DECRYPT" +} + +resource "google_kms_key_ring" "key_ring" { + provider = google-beta + + name = "<%= ctx[:vars]['kms_key_ring_name'] %>" + location = "us" +} + +resource "google_kms_crypto_key_iam_binding" "firestore_cmek_keyuser" { + provider = google-beta + + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = 
"roles/cloudkms.cryptoKeyEncrypterDecrypter" + + members = [ + "serviceAccount:service-${data.google_project.project.number}@gcp-sa-firestore.iam.gserviceaccount.com", + ] +} diff --git a/mmv1/templates/terraform/examples/gkehub_scope_basic.tf.erb b/mmv1/templates/terraform/examples/gkehub_scope_basic.tf.erb index 107fa956c82d..63efb8be3c78 100644 --- a/mmv1/templates/terraform/examples/gkehub_scope_basic.tf.erb +++ b/mmv1/templates/terraform/examples/gkehub_scope_basic.tf.erb @@ -1,5 +1,10 @@ resource "google_gke_hub_scope" "<%= ctx[:primary_resource_id] %>" { scope_id = "<%= ctx[:vars]['resource_name'] %>" + namespace_labels = { + keyb = "valueb" + keya = "valuea" + keyc = "valuec" + } labels = { keyb = "valueb" keya = "valuea" diff --git a/mmv1/templates/terraform/examples/gkeonprem_vmware_cluster_f5lb.tf.erb b/mmv1/templates/terraform/examples/gkeonprem_vmware_cluster_f5lb.tf.erb index 3f9f6a15c69b..dbab108ee6bf 100644 --- a/mmv1/templates/terraform/examples/gkeonprem_vmware_cluster_f5lb.tf.erb +++ b/mmv1/templates/terraform/examples/gkeonprem_vmware_cluster_f5lb.tf.erb @@ -21,6 +21,7 @@ resource "google_gkeonprem_vmware_cluster" "<%= ctx[:primary_resource_id] %>" { gateway="test-gateway" } } + vcenter_network = "test-vcenter-network" } control_plane_node { cpus = 4 diff --git a/mmv1/templates/terraform/examples/iap_app_engine_service.tf.erb b/mmv1/templates/terraform/examples/iap_app_engine_service.tf.erb index 37f79449f02b..dff90f5307e6 100644 --- a/mmv1/templates/terraform/examples/iap_app_engine_service.tf.erb +++ b/mmv1/templates/terraform/examples/iap_app_engine_service.tf.erb @@ -36,7 +36,7 @@ resource "google_app_engine_standard_app_version" "version" { project = google_app_engine_application.app.project version_id = "v2" service = "default" - runtime = "nodejs10" + runtime = "nodejs20" noop_on_destroy = true // TODO: Removed basic scaling once automatic_scaling refresh behavior is fixed. 
diff --git a/mmv1/templates/terraform/examples/iap_app_engine_version.tf.erb b/mmv1/templates/terraform/examples/iap_app_engine_version.tf.erb index 92df16cdac3b..6cf3eef2d0d8 100644 --- a/mmv1/templates/terraform/examples/iap_app_engine_version.tf.erb +++ b/mmv1/templates/terraform/examples/iap_app_engine_version.tf.erb @@ -12,7 +12,7 @@ resource "google_storage_bucket_object" "object" { resource "google_app_engine_standard_app_version" "version" { version_id = "%{random_suffix}" service = "default" - runtime = "nodejs10" + runtime = "nodejs20" noop_on_destroy = false entrypoint { diff --git a/mmv1/templates/terraform/examples/kms_ekm_connection_basic.tf.erb b/mmv1/templates/terraform/examples/kms_ekm_connection_basic.tf.erb new file mode 100644 index 000000000000..49fcd7f8769c --- /dev/null +++ b/mmv1/templates/terraform/examples/kms_ekm_connection_basic.tf.erb @@ -0,0 +1,12 @@ +resource "google_kms_ekm_connection" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['ekmconnection_name'] %>" + location = "us-central1" + key_management_mode = "MANUAL" + service_resolvers { + service_directory_service = "projects/project_id/locations/us-central1/namespaces/namespace_name/services/service_name" + hostname = "example-ekm.goog" + server_certificates { + raw_der = "==HAwIBCCAr6gAwIBAgIUWR+EV4lqiV7Ql12VY==" + } + } +} diff --git a/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.erb b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.erb new file mode 100644 index 000000000000..fe794ed86e7b --- /dev/null +++ b/mmv1/templates/terraform/examples/region_network_endpoint_group_appengine_empty.tf.erb @@ -0,0 +1,8 @@ +// App Engine Example +resource "google_compute_region_network_endpoint_group" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['neg_name'] %>" + network_endpoint_type = "SERVERLESS" + region = "us-central1" + app_engine { + } +} diff --git 
a/mmv1/templates/terraform/examples/region_target_https_proxy_certificate_manager_certificate.tf.erb b/mmv1/templates/terraform/examples/region_target_https_proxy_certificate_manager_certificate.tf.erb new file mode 100644 index 000000000000..05c8d90b46f9 --- /dev/null +++ b/mmv1/templates/terraform/examples/region_target_https_proxy_certificate_manager_certificate.tf.erb @@ -0,0 +1,28 @@ +resource "google_compute_region_target_https_proxy" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['region_target_https_proxy_name'] %>" + url_map = google_compute_region_url_map.default.id + certificate_manager_certificates = ["//certificatemanager.googleapis.com/${google_certificate_manager_certificate.default.id}"] # [google_certificate_manager_certificate.default.id] is also acceptable +} + +resource "google_certificate_manager_certificate" "default" { + name = "<%= ctx[:vars]['certificate_manager_certificate_name'] %>" + location = "us-central1" + self_managed { + pem_certificate = file("test-fixtures/cert.pem") + pem_private_key = file("test-fixtures/private-key.pem") + } +} + +resource "google_compute_region_url_map" "default" { + name = "<%= ctx[:vars]['region_url_map_name'] %>" + default_service = google_compute_region_backend_service.default.id + region = "us-central1" +} + +resource "google_compute_region_backend_service" "default" { + name = "<%= ctx[:vars]['region_backend_service_name'] %>" + region = "us-central1" + protocol = "HTTPS" + timeout_sec = 30 + load_balancing_scheme = "INTERNAL_MANAGED" +} \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/securityposture_posture_basic.tf.erb b/mmv1/templates/terraform/examples/securityposture_posture_basic.tf.erb index 9101034c48c2..32205bb0ae71 100644 --- a/mmv1/templates/terraform/examples/securityposture_posture_basic.tf.erb +++ b/mmv1/templates/terraform/examples/securityposture_posture_basic.tf.erb @@ -1,5 +1,5 @@ resource "google_securityposture_posture" "<%= 
ctx[:primary_resource_id] %>"{ - posture_id = "posture_1" + posture_id = "posture_example" parent = "organizations/<%= ctx[:test_env_vars]['org_id'] %>" location = "global" state = "ACTIVE" diff --git a/mmv1/templates/terraform/examples/securityposture_posture_deployment_basic.tf.erb b/mmv1/templates/terraform/examples/securityposture_posture_deployment_basic.tf.erb index 8a514eb735e2..45cd4d886073 100644 --- a/mmv1/templates/terraform/examples/securityposture_posture_deployment_basic.tf.erb +++ b/mmv1/templates/terraform/examples/securityposture_posture_deployment_basic.tf.erb @@ -1,4 +1,4 @@ -resource "google_securityposture_posture" "posture1" { +resource "google_securityposture_posture" "posture_1" { posture_id = "posture_1" parent = "organizations/<%= ctx[:test_env_vars]['org_id'] %>" location = "global" @@ -27,6 +27,6 @@ resource "google_securityposture_posture_deployment" "<%= ctx[:primary_resource_ location = "global" description = "a new posture deployment" target_resource = "projects/<%= ctx[:test_env_vars]['project_number'] %>" - posture_id = google_securityposture_posture.posture1.name - posture_revision_id = google_securityposture_posture.posture1.revision_id + posture_id = google_securityposture_posture.posture_1.name + posture_revision_id = google_securityposture_posture.posture_1.revision_id } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/service_attachment_explicit_networks.tf.erb b/mmv1/templates/terraform/examples/service_attachment_explicit_networks.tf.erb new file mode 100644 index 000000000000..a66f689582af --- /dev/null +++ b/mmv1/templates/terraform/examples/service_attachment_explicit_networks.tf.erb @@ -0,0 +1,97 @@ +resource "google_compute_service_attachment" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]['service_attachment_name'] %>" + region = "us-west2" + description = "A service attachment configured with Terraform" + + enable_proxy_protocol = false + + connection_preference = 
"ACCEPT_MANUAL" + nat_subnets = [google_compute_subnetwork.psc_ilb_nat.id] + target_service = google_compute_forwarding_rule.psc_ilb_target_service.id + + consumer_accept_lists { + network_url = google_compute_network.psc_ilb_consumer_network.self_link + connection_limit = 1 + } +} + +resource "google_compute_network" "psc_ilb_consumer_network" { + name = "<%= ctx[:vars]['consumer_network_name'] %>" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc_ilb_consumer_subnetwork" { + name = "<%= ctx[:vars]['consumer_network_name'] %>" + ip_cidr_range = "10.0.0.0/16" + region = "us-west2" + network = google_compute_network.psc_ilb_consumer_network.id +} + +resource "google_compute_address" "psc_ilb_consumer_address" { + name = "<%= ctx[:vars]['consumer_address_name'] %>" + region = "us-west2" + + subnetwork = google_compute_subnetwork.psc_ilb_consumer_subnetwork.id + address_type = "INTERNAL" +} + +resource "google_compute_forwarding_rule" "psc_ilb_consumer" { + name = "<%= ctx[:vars]['consumer_forwarding_rule_name'] %>" + region = "us-west2" + + target = google_compute_service_attachment.psc_ilb_service_attachment.id + load_balancing_scheme = "" # need to override EXTERNAL default when target is a service attachment + network = google_compute_network.psc_ilb_consumer_network.id + subnetwork = google_compute_subnetwork.psc_ilb_consumer_subnetwork.id + ip_address = google_compute_address.psc_ilb_consumer_address.id +} + +resource "google_compute_forwarding_rule" "psc_ilb_target_service" { + name = "<%= ctx[:vars]['producer_forwarding_rule_name'] %>" + region = "us-west2" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.producer_service_backend.id + all_ports = true + network = google_compute_network.psc_ilb_network.name + subnetwork = google_compute_subnetwork.psc_ilb_producer_subnetwork.name +} + +resource "google_compute_region_backend_service" "producer_service_backend" { + name = "<%= 
ctx[:vars]['producer_service_name'] %>" + region = "us-west2" + + health_checks = [google_compute_health_check.producer_service_health_check.id] +} + +resource "google_compute_health_check" "producer_service_health_check" { + name = "<%= ctx[:vars]['producer_health_check_name'] %>" + + check_interval_sec = 1 + timeout_sec = 1 + tcp_health_check { + port = "80" + } +} + +resource "google_compute_network" "psc_ilb_network" { + name = "<%= ctx[:vars]['network_name'] %>" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc_ilb_producer_subnetwork" { + name = "<%= ctx[:vars]['producer_subnetwork_name'] %>" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_subnetwork" "psc_ilb_nat" { + name = "<%= ctx[:vars]['nat_subnetwork_name'] %>" + region = "us-west2" + + network = google_compute_network.psc_ilb_network.id + purpose = "PRIVATE_SERVICE_CONNECT" + ip_cidr_range = "10.1.0.0/16" +} diff --git a/mmv1/templates/terraform/product_yaml_conversion.erb b/mmv1/templates/terraform/product_yaml_conversion.erb new file mode 100644 index 000000000000..cade809c4334 --- /dev/null +++ b/mmv1/templates/terraform/product_yaml_conversion.erb @@ -0,0 +1,135 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Warning: This is a temporary file, and should not be edited directly +--- +<% +#names +-%> +name: '<%= object.__product.name %>' +<% unless object.__product.legacy_name.nil? -%> +legacy_name: '<%= object.__product.legacy_name %>' +<% end -%> +<% unless object.__product.display_name.nil? -%> +display_name: '<%= object.__product.display_name %>' +<% end -%> +<% unless object.__product.client_name.nil? -%> +client_name: '<%= object.__product.client_name %>' +<% end -%> +<% +#versions +-%> +<% unless object.__product.versions.empty? -%> +versions: +<% object.__product.versions.each do |version| -%> + - name: '<%= version.name %>' +<% unless version.base_url.nil? -%> + base_url: '<%= version.base_url %>' +<% end -%> +<% unless version.cai_base_url.nil? -%> + cai_base_url: '<%= version.cai_base_url %>' +<% end -%> +<% end -%> +<% end -%> +<% +#scopes +-%> +<% unless object.__product.scopes.nil? -%> +scopes: +<% object.__product.scopes.each do |scope| -%> + - '<%= scope -%>' +<% end -%> +<% end -%> +<% +#async +-%> +<% unless object.__product.async.nil? -%> +async: +<% if object.__product.async.is_a? Provider::Terraform::PollAsync -%> +<% unless object.__product.async.check_response_func_existence.nil? -%> + check_response_func_existence: '<%= object.__product.async.check_response_func_existence %>' +<% end -%> +<% unless object.__product.async.check_response_func_absence.nil? -%> + check_response_func_absence: '<%= object.__product.async.check_response_func_absence %>' +<% end -%> +<% unless object.__product.async.custom_poll_read.nil? -%> + custom_poll_read: '<%= object.__product.async.custom_poll_read %>' +<% end -%> +<% unless object.__product.async.suppress_error.nil? -%> + suppress_error: <%= object.__product.async.suppress_error %> +<% end -%> +<% unless object.__product.async.target_occurrences.nil? -%> + target_occurrences: <%= object.__product.async.target_occurrences %> +<% end -%> +<% end -%> +<% if object.__product.async.is_a? 
Api::OpAsync -%> +<% #async.operation -%> +<% unless object.__product.async.operation.nil? -%> + operation: +<% unless object.__product.async.operation.base_url.nil? -%> + base_url: '<%= object.__product.async.operation.base_url %>' +<% end -%> +<% unless object.__product.async.operation.full_url.nil? -%> + full_url: '<%= object.__product.async.operation.full_url %>' +<% end -%> +<% unless object.__product.async.operation.kind.nil? -%> + kind: '<%= object.__product.async.operation.kind %>' +<% end -%> +<% unless object.__product.async.operation.path.nil? -%> + path: '<%= object.__product.async.operation.path %>' +<% end -%> +<% unless object.__product.async.operation.wait_ms.nil? -%> + wait_ms: <%= object.__product.async.operation.wait_ms %> +<% end -%> +<% #async.operation.timeouts -%> +<% unless object.__product.async.operation.timeouts.nil? -%> + timeouts: +<% unless object.__product.async.operation.timeouts.insert_minutes.nil? -%> + insert_minutes: <%= object.__product.async.operation.timeouts.insert_minutes %> +<% end -%> +<% unless object.__product.async.operation.timeouts.update_minutes.nil? -%> + update_minutes: <%= object.__product.async.operation.timeouts.update_minutes %> +<% end -%> +<% unless object.__product.async.operation.timeouts.delete_minutes.nil? -%> + delete_minutes: <%= object.__product.async.operation.timeouts.delete_minutes %> +<% end -%> +<% end -%> +<% end -%> +<% #async.result -%> +<% unless object.__product.async.result.nil? -%> + result: +<% unless object.__product.async.result.path.nil? -%> + path: '<%= object.__product.async.result.path %>' +<% end -%> +<% unless object.__product.async.result.resource_inside_response.nil? -%> + resource_inside_response: <%= object.__product.async.result.resource_inside_response %> +<% end -%> +<% end -%> +<% #async.error -%> +<% unless object.__product.async.error.nil? -%> + error: +<% unless object.__product.async.error.path.nil? 
-%> + path: '<%= object.__product.async.error.path %>' +<% end -%> +<% unless object.__product.async.error.message.nil? -%> + message: '<%= object.__product.async.error.message %>' +<% end -%> +<% end -%> +<% end -%> +<% end -%> +<% +#misc +-%> +<% unless object.__product.operation_retry.nil? -%> +operation_retry: '<%= object.__product.operation_retry %>' +<% end -%> diff --git a/mmv1/templates/terraform/state_migrations/certificate_manager_dns_authorization.go.erb b/mmv1/templates/terraform/state_migrations/certificate_manager_dns_authorization.go.erb new file mode 100644 index 000000000000..b3d29dc64136 --- /dev/null +++ b/mmv1/templates/terraform/state_migrations/certificate_manager_dns_authorization.go.erb @@ -0,0 +1,91 @@ +func ResourceCertificateManagerDnsAuthorizationUpgradeV0(_ context.Context, rawState map[string]interface{}, meta interface{}) (map[string]interface{}, error) { + log.Printf("[DEBUG] Attributes before migration: %#v", rawState) + // Version 0 didn't support location. Default it to global. + rawState["location"] = "global" + log.Printf("[DEBUG] Attributes after migration: %#v", rawState) + return rawState, nil +} + +func resourceCertificateManagerDnsAuthorizationResourceV0() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "domain": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `A domain which is being authorized. A DnsAuthorization resource covers a +single domain and its wildcard, e.g. authorization for "example.com" can +be used to issue certificates for "example.com" and "*.example.com".`, + }, + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Name of the resource; provided by the client when the resource is created. 
+The name must be 1-64 characters long, and match the regular expression [a-zA-Z][a-zA-Z0-9_-]* which means the first character must be a letter, +and all following characters must be a dash, underscore, letter or digit.`, + }, + "description": { + Type: schema.TypeString, + Optional: true, + Description: `A human-readable description of the resource.`, + }, + "labels": { + Type: schema.TypeMap, + Optional: true, + Description: `Set of label tags associated with the DNS Authorization resource. + +**Note**: This field is non-authoritative, and will only manage the labels present in your configuration. +Please refer to the field 'effective_labels' for all of the labels present on the resource.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "dns_resource_record": { + Type: schema.TypeList, + Computed: true, + Description: `The structure describing the DNS Resource Record that needs to be added +to DNS configuration for the authorization to be usable by +certificate.`, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "data": { + Type: schema.TypeString, + Computed: true, + Description: `Data of the DNS Resource Record.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `Fully qualified name of the DNS Resource Record. +E.g. 
'_acme-challenge.example.com'.`, + }, + "type": { + Type: schema.TypeString, + Computed: true, + Description: `Type of the DNS Resource Record.`, + }, + }, + }, + }, + "effective_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "terraform_labels": { + Type: schema.TypeMap, + Computed: true, + Description: `The combination of labels configured directly on the resource + and default labels configured on the provider.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + } + +} \ No newline at end of file diff --git a/mmv1/templates/terraform/yaml_conversion.erb b/mmv1/templates/terraform/yaml_conversion.erb new file mode 100644 index 000000000000..e89df4397fd9 --- /dev/null +++ b/mmv1/templates/terraform/yaml_conversion.erb @@ -0,0 +1,550 @@ +# Copyright 2024 Google Inc. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Warning: This is a temporary file, and should not be edited directly +--- +<% +#common attrs +-%> +name: '<%= object.name %>' +<% unless object.kind.nil? -%> +kind: '<%= object.kind %>' +<% end -%> +<% unless object.legacy_name.nil? 
-%> +legacy_name: '<%= object.legacy_name %>' +<% end -%> +description: | + <%= object.description.gsub(/\n/, "\n ") %> +<% unless object.min_version.nil? -%> +<% unless object.min_version.name == 'ga' -%> +min_version: '<%= object.min_version.name %>' +<% end -%> +<% end -%> +<% unless !object.exclude_resource -%> +exclude_resource: <%= object.exclude_resource %> +<% end -%> +<% unless !object.exclude -%> +exclude: <%= object.exclude %> +<% end -%> +<% unless object.readonly.nil? -%> +readonly: <%= object.readonly %> +<% end -%> +<% +#references blocks +-%> +<% unless object.references.nil? -%> +references: +<% unless object.references.guides.nil? -%> + guides: +<% object.references.guides.each do |title, link| -%> + '<%= title -%>': '<%= link %>' +<% end -%> +<% end -%> +<% unless object.references.api.nil? -%> + api: '<%= object.references.api %>' +<% end -%> +<% end -%> +<% +#docs blocks +-%> +<% unless object.docs.nil? -%> +docs: +<% unless object.docs.warning.nil? -%> + warning: '<%= object.docs.warning %>' +<% end -%> +<% unless object.docs.note.nil? -%> + note: '<%= object.docs.note %>' +<% end -%> +<% unless object.docs.required_properties.nil? -%> + required_properties: '<%= object.docs.required_properties %>' +<% end -%> +<% unless object.docs.optional_properties.nil? -%> + optional_properties: '<%= object.docs.optional_properties %>' +<% end -%> +<% unless object.docs.attributes.nil? -%> + attributes: '<%= object.docs.attributes %>' +<% end -%> +<% end -%> +<% +#url/http attrs +-%> +<% unless object.id_format.nil? -%> +id_format: '<%= object.id_format %>' +<% end -%> +<% unless object.base_url.nil? -%> +base_url: '<%= object.base_url %>' +<% end -%> +<% unless object.cai_base_url.nil? -%> +cai_base_url: '<%= object.cai_base_url %>' +<% end -%> +<% unless object.self_link.nil? -%> +self_link: '<%= object.self_link %>' +<% end -%> +<% unless !object.has_self_link -%> +has_self_link: <%= object.has_self_link %> +<% end -%> +<% unless object.create_url.nil? 
-%> +create_url: '<%= object.create_url %>' +<% end -%> +<% unless object.create_verb.to_s == 'POST' -%> +create_verb: '<%= object.create_verb.to_s %>' +<% end -%> +<% unless object.update_url.nil? -%> +update_url: '<%= object.update_url %>' +<% end -%> +<% unless object.update_verb.to_s == 'PUT' -%> +update_verb: '<%= object.update_verb.to_s %>' +<% end -%> +<% unless object.update_mask.nil? -%> +update_mask: <%= object.update_mask %> +<% end -%> +<% unless object.read_verb.to_s == 'GET' -%> +read_verb: '<%= object.read_verb.to_s %>' +<% end -%> +<% unless object.read_query_params.nil? %> +read_query_params: '<%= object.read_query_params %>' +<% end -%> +<% unless !object.skip_read -%> +skip_read: <%= object.skip_read %> +<% end -%> +<% unless object.delete_url.nil? -%> +delete_url: '<%= object.delete_url %>' +<% end -%> +<% unless object.delete_verb.to_s == 'DELETE' -%> +delete_verb: '<%= object.delete_verb.to_s %>' +<% end -%> +<% unless !object.skip_delete -%> +skip_delete: <%= object.skip_delete %> +<% end -%> +<% unless object.immutable.nil? -%> +immutable: <%= object.immutable %> +<% end -%> +<% unless object.mutex.nil? -%> +mutex: <%= object.mutex %> +<% end -%> +<% +#import +-%> +<% unless object.import_format.empty? -%> +import_format: +<% object.import_format.each do |iformat| -%> + - '<%= iformat %>' +<% end -%> +<% end -%> +<% unless !object.exclude_import -%> +exclude_import: <%= object.exclude_import %> +<% end -%> +<% +#timeouts +-%> +<% unless object.timeouts.nil? -%> +timeouts: +<% unless object.timeouts.insert_minutes.nil? -%> + insert_minutes: <%= object.timeouts.insert_minutes %> +<% end -%> +<% unless object.timeouts.update_minutes.nil? -%> + update_minutes: <%= object.timeouts.update_minutes %> +<% end -%> +<% unless object.timeouts.delete_minutes.nil? 
-%> + delete_minutes: <%= object.timeouts.delete_minutes %> +<% end -%> +<% end -%> +<% +#async +-%> +<% unless !object.autogen_async -%> +autogen_async: <%= object.autogen_async %> +<% end -%> +<% unless object.async.nil? -%> +async: +<% if object.async.is_a? Provider::Terraform::PollAsync -%> +<% unless object.async.check_response_func_existence.nil? -%> + check_response_func_existence: '<%= object.async.check_response_func_existence %>' +<% end -%> +<% unless object.async.check_response_func_absence.nil? -%> + check_response_func_absence: '<%= object.async.check_response_func_absence %>' +<% end -%> +<% unless object.async.custom_poll_read.nil? -%> + custom_poll_read: '<%= object.async.custom_poll_read %>' +<% end -%> +<% unless object.async.suppress_error.nil? -%> + suppress_error: <%= object.async.suppress_error %> +<% end -%> +<% unless object.async.target_occurrences.nil? -%> + target_occurrences: <%= object.async.target_occurrences %> +<% end -%> +<% end -%> +<% if object.async.is_a? Api::OpAsync -%> +<% #async.operation %> +<% unless object.async.operation.nil? -%> + operation: +<% unless object.async.operation.base_url.nil? -%> + base_url: '<%= object.async.operation.base_url %>' +<% end -%> +<% unless object.async.operation.full_url.nil? -%> + full_url: '<%= object.async.operation.full_url %>' +<% end -%> +<% unless object.async.operation.kind.nil? -%> + kind: '<%= object.async.operation.kind %>' +<% end -%> +<% unless object.async.operation.path.nil? -%> + path: '<%= object.async.operation.path %>' +<% end -%> +<% unless object.async.operation.wait_ms.nil? -%> + wait_ms: <%= object.async.operation.wait_ms %> +<% end -%> +<% #async.operation.timeouts %> +<% unless object.async.operation.timeouts.nil? -%> + timeouts: +<% unless object.async.operation.timeouts.insert_minutes.nil? -%> + insert_minutes: <%= object.async.operation.timeouts.insert_minutes %> +<% end -%> +<% unless object.async.operation.timeouts.update_minutes.nil? 
-%> + update_minutes: <%= object.async.operation.timeouts.update_minutes %> +<% end -%> +<% unless object.async.operation.timeouts.delete_minutes.nil? -%> + delete_minutes: <%= object.async.operation.timeouts.delete_minutes %> +<% end -%> +<% end -%> +<% end -%> +<% #async.result %> +<% unless object.async.result.nil? -%> + result: +<% unless object.async.result.path.nil? -%> + path: '<%= object.async.result.path %>' +<% end -%> +<% unless object.async.result.resource_inside_response.nil? -%> + resource_inside_response: <%= object.async.result.resource_inside_response %> +<% end -%> +<% end -%> +<% #async.error %> +<% unless object.async.error.nil? -%> + error: +<% unless object.async.error.path.nil? -%> + path: '<%= object.async.error.path %>' +<% end -%> +<% unless object.async.error.message.nil? -%> + message: '<%= object.async.error.message %>' +<% end -%> +<% end -%> +<% end -%> +<% end -%> +<% +#collection/identity url +-%> +<% unless object.collection_url_key == object.name.plural.camelize(:lower) -%> +collection_url_key: '<%= object.collection_url_key %>' +<% end -%> +<% unless object.nested_query.nil? -%> +nested_query: +<% unless object.nested_query.keys.nil? -%> + keys: +<% object.nested_query.keys.each do |key| %> + - <%= key -%> +<% end -%> +<% end -%> +<% unless object.nested_query.is_list_of_ids.nil? -%> + is_list_of_ids: <%= object.nested_query.is_list_of_ids %> +<% end -%> +<% unless object.nested_query.modify_by_patch.nil? -%> + modify_by_patch: <%= object.nested_query.modify_by_patch %> +<% end -%> +<% end -%> +<% +#IAM +-%> +<% unless object.iam_policy.nil? 
-%> +iam_policy: +<% unless !object.iam_policy.exclude -%> + exclude: <%= object.iam_policy.exclude %> +<% end -%> +<% unless !object.iam_policy.exclude_tgc -%> + exclude_tgc: <%= object.iam_policy.exclude_tgc %> +<% end -%> +<% unless !object.iam_policy.skip_import_test -%> + skip_import_test: <%= object.iam_policy.skip_import_test %> +<% end -%> +<% unless object.iam_policy.method_name_separator == '/' -%> + method_name_separator: '<%= object.iam_policy.method_name_separator %>' +<% end -%> +<% unless object.iam_policy.parent_resource_type.nil? -%> + parent_resource_type: '<%= object.iam_policy.parent_resource_type %>' +<% end -%> +<% unless object.iam_policy.fetch_iam_policy_verb.to_s == 'GET' -%> + fetch_iam_policy_verb: '<%= object.iam_policy.fetch_iam_policy_verb.to_s %>' +<% end -%> +<% unless object.iam_policy.fetch_iam_policy_method == 'getIamPolicy' -%> + fetch_iam_policy_method: '<%= object.iam_policy.fetch_iam_policy_method %>' +<% end -%> +<% unless object.iam_policy.set_iam_policy_verb.to_s == 'POST' -%> + set_iam_policy_verb: '<%= object.iam_policy.set_iam_policy_verb.to_s %>' +<% end -%> +<% unless object.iam_policy.set_iam_policy_method == 'setIamPolicy' -%> + set_iam_policy_method: '<%= object.iam_policy.set_iam_policy_method %>' +<% end -%> +<% unless object.iam_policy.wrapped_policy_obj -%> + wrapped_policy_obj: <%= object.iam_policy.wrapped_policy_obj %> +<% end -%> +<% unless object.iam_policy.allowed_iam_role == 'roles/viewer' -%> + allowed_iam_role: '<%= object.iam_policy.allowed_iam_role %>' +<% end -%> +<% unless object.iam_policy.admin_iam_role.nil? -%> + admin_iam_role: '<%= object.iam_policy.admin_iam_role %>' +<% end -%> +<% unless object.iam_policy.parent_resource_attribute == 'id' -%> + parent_resource_attribute: '<%= object.iam_policy.parent_resource_attribute %>' +<% end -%> +<% unless object.iam_policy.test_project_name.nil? 
-%> + test_project_name: '<%= object.iam_policy.test_project_name %>' +<% end -%> +<% unless object.iam_policy.iam_conditions_request_type.nil? -%> + iam_conditions_request_type: '<%= object.iam_policy.iam_conditions_request_type %>' +<% end -%> +<% unless object.iam_policy.base_url.nil? -%> + base_url: '<%= object.iam_policy.base_url %>' +<% end -%> +<% unless object.iam_policy.self_link.nil? -%> + self_link: '<%= object.iam_policy.self_link %>' +<% end -%> +<% unless object.iam_policy.import_format.nil? -%> + import_format: +<% object.iam_policy.import_format.each do |iformat| -%> + - '<%= iformat %>' +<% end -%> +<% end -%> +<% unless object.iam_policy.iam_policy_version.nil? -%> + iam_policy_version: '<%= object.iam_policy.iam_policy_version %>' +<% end -%> +<% unless object.iam_policy.min_version.nil? -%> + min_version: '<%= object.iam_policy.min_version %>' +<% end -%> +<% unless object.iam_policy.substitute_zone_value -%> + substitute_zone_value: <%= object.iam_policy.substitute_zone_value %> +<% end -%> +<% end -%> +<% +#custom code +-%> +<% unless object.custom_code.nil? -%> +custom_code: +<% unless object.custom_code.extra_schema_entry.nil? -%> + extra_schema_entry: '<%= object.custom_code.extra_schema_entry %>' +<% end -%> +<% unless object.custom_code.constants.nil? -%> + constants: '<%= object.custom_code.constants %>' +<% end -%> +<% unless object.custom_code.encoder.nil? -%> + encoder: '<%= object.custom_code.encoder %>' +<% end -%> +<% unless object.custom_code.update_encoder.nil? -%> + update_encoder: '<%= object.custom_code.update_encoder %>' +<% end -%> +<% unless object.custom_code.decoder.nil? -%> + decoder: '<%= object.custom_code.decoder %>' +<% end -%> +<% unless object.custom_code.pre_create.nil? -%> + pre_create: '<%= object.custom_code.pre_create %>' +<% end -%> +<% unless object.custom_code.post_create.nil? -%> + post_create: '<%= object.custom_code.post_create %>' +<% end -%> +<% unless object.custom_code.custom_create.nil? 
-%> + custom_create: '<%= object.custom_code.custom_create %>' +<% end -%> +<% unless object.custom_code.pre_read.nil? -%> + pre_read: '<%= object.custom_code.pre_read %>' +<% end -%> +<% unless object.custom_code.pre_update.nil? -%> + pre_update: '<%= object.custom_code.pre_update %>' +<% end -%> +<% unless object.custom_code.post_update.nil? -%> + post_update: '<%= object.custom_code.post_update %>' +<% end -%> +<% unless object.custom_code.custom_update.nil? -%> + custom_update: '<%= object.custom_code.custom_update %>' +<% end -%> +<% unless object.custom_code.pre_delete.nil? -%> + pre_delete: '<%= object.custom_code.pre_delete %>' +<% end -%> +<% unless object.custom_code.custom_import.nil? -%> + custom_import: '<%= object.custom_code.custom_import %>' +<% end -%> +<% unless object.custom_code.post_import.nil? -%> + post_import: '<%= object.custom_code.post_import %>' +<% end -%> +<% unless object.custom_code.test_check_destroy.nil? -%> + test_check_destroy: '<%= object.custom_code.test_check_destroy %>' +<% end -%> +<% end -%> +<% unless object.custom_diff.empty? || (object.custom_diff.size == 1 && object.custom_diff.include?("tpgresource.SetLabelsDiff")) -%> +custom_diff: +<% object.custom_diff.each do |cdiff| -%> + - '<%= cdiff %>' +<% end -%> +<% end -%> +<% unless !object.skip_default_cdiff -%> +skip_default_cdiff: <%= object.skip_default_cdiff %> +<% end -%> +<% +#terraform overrides +-%> +<% unless object.filename_override.nil? -%> +filename_override: '<%= object.filename_override %>' +<% end -%> +<% unless object.exclude_tgc.nil? -%> +exclude_tgc: <%= object.exclude_tgc %> +<% end -%> +<% unless !object.skip_sweeper -%> +skip_sweeper: <%= object.skip_sweeper %> +<% end -%> +<% unless object.error_retry_predicates.nil? -%> +error_retry_predicates: +<% object.error_retry_predicates.each do |erpred| %> + - '<%= erpred -%>' +<% end -%> +<% end -%> +<% unless object.error_abort_predicates.nil? 
-%> +error_abort_predicates: +<% object.error_abort_predicates.each do |eapred| %> + - '<%= eapred -%>' +<% end -%> +<% end -%> +<% unless object.schema_version.nil? -%> +schema_version: <%= object.schema_version %> +<% end -%> +<% unless object.state_upgrade_base_schema_version == 0 -%> +state_upgrade_base_schema_version: <%= object.state_upgrade_base_schema_version %> +<% end -%> +<% unless !object.state_upgraders -%> +state_upgraders: <%= object.state_upgraders %> +<% end -%> +<% unless object.migrate_state.nil? -%> +migrate_state: '<%= object.migrate_state %>' +<% end -%> +<% unless !object.legacy_long_form_project -%> +legacy_long_form_project: <%= object.legacy_long_form_project %> +<% end -%> +<% unless !object.supports_indirect_user_project_override -%> +supports_indirect_user_project_override: <%= object.supports_indirect_user_project_override %> +<% end -%> +<% unless object.read_error_transform.nil? -%> +read_error_transform: '<%= object.read_error_transform %>' +<% end -%> +<% unless !object.taint_resource_on_failed_create -%> +taint_resource_on_failed_create: <%= object.taint_resource_on_failed_create %> +<% end -%> +<% unless object.deprecation_message.nil? -%> +deprecation_message: '<%= object.deprecation_message %>' +<% end -%> +<% +#examples +-%> +<% unless object.examples.empty? -%> +examples: +<% object.examples.each do |example| -%> + - name: '<%= example.name %>' +<% unless example.config_path == "templates/terraform/examples/#{example.name}.tf.erb" -%> + config_path: '<%= example.config_path %>' +<% end -%> +<% unless example.primary_resource_id.nil? -%> + primary_resource_id: '<%= example.primary_resource_id %>' +<% end -%> +<% unless example.primary_resource_id.nil? -%> + primary_resource_name: '<%= example.primary_resource_id %>' +<% end -%> +<% unless example.min_version.nil? -%> + min_version: '<%= example.min_version %>' +<% end -%> +<% unless example.vars.nil? -%> +<% unless example.vars.empty? 
-%> + vars: +<% example.vars.each do |vname, val| -%> + <%= vname -%>: '<%= val %>' +<% end -%> +<% end -%> +<% end -%> +<% unless example.test_env_vars.nil? -%> +<% unless example.test_env_vars.empty? -%> + test_env_vars: +<% example.test_env_vars.each do |vname, val| -%> + <%= vname -%>: '<%= val %>' +<% end -%> +<% end -%> +<% end -%> +<% unless example.test_vars_overrides.nil? -%> +<% unless example.test_vars_overrides.empty? -%> + test_vars_overrides: +<% example.test_vars_overrides.each do |vname, val| -%> + '<%= vname -%>': '<%= val %>' +<% end -%> +<% end -%> +<% end -%> +<% unless example.ignore_read_extra.empty? -%> + ignore_read_extra: +<% example.ignore_read_extra.each do |irextra| -%> + '<%= irextra %>' +<% end -%> +<% end -%> +<% unless !example.pull_external -%> + pull_external: <%= example.pull_external %> +<% end -%> +<% unless example.skip_test.nil? -%> + skip_test: <%= example.skip_test %> +<% end -%> +<% unless example.skip_import_test.nil? -%> + skip_import_test: <%= example.skip_import_test %> +<% end -%> +<% unless example.skip_docs.nil? -%> + skip_docs: <%= example.skip_docs %> +<% end -%> +<% unless example.skip_vcr.nil? -%> + skip_vcr: <%= example.skip_vcr %> +<% end -%> +<% end -%> +<% end -%> +<% +#virtual fields +-%> +<% unless object.virtual_fields.empty? -%> +virtual_fields: +<% object.virtual_fields.each do |vfield| -%> + - name: '<%= vfield.name %>' + description: '<%= vfield.description %>' +<% unless vfield.type.nil? -%> + type: <%= tf_type(vfield.type) %> +<% end -%> +<% unless vfield.default_value.nil? -%> + default_value: <%= go_literal(vfield.default_value) %> +<% end -%> +<% unless vfield.immutable.nil? -%> + immutable: <%= vfield.immutable %> +<% end -%> +<% end -%> +<% end -%> +<% +#fields +-%> +<% unless object.parameters.nil? -%> +parameters: +<% object.parameters.each do |prop| -%> +<%= lines(build_newyaml_field(prop, object, pwd)) -%> +<% end -%> +<% end -%> +<% unless object.properties.nil? 
-%> +properties: +<% object.properties.each do |prop| -%> +<%= lines(build_newyaml_field(prop, object, pwd)) -%> +<% end -%> +<% end -%> +<%# end -%> + diff --git a/mmv1/templates/terraform/yaml_conversion_field.erb b/mmv1/templates/terraform/yaml_conversion_field.erb new file mode 100644 index 000000000000..0ce6832e4fd7 --- /dev/null +++ b/mmv1/templates/terraform/yaml_conversion_field.erb @@ -0,0 +1,210 @@ +<% indent_spaces = 2 -%> +<% unless property.class.to_s == 'Api::Type::KeyValueTerraformLabels' || property.class.to_s == 'Api::Type::KeyValueEffectiveLabels' -%> + - name: '<%= property.name -%>' + type: <%= property.class.to_s.gsub("Api::Type::", "") %> +<% unless property.description.nil? -%> + description: "<%= property.description.strip.gsub('"', '\'') -%>" +<% end -%> +<% unless !property.unordered_list -%> + unordered_list: <%= property.unordered_list %> +<% end -%> +<% unless !property.is_set -%> + is_set: <%= property.is_set %> +<% end -%> +<% unless !property.schema_config_mode_attr -%> + schema_config_mode_attr: <%= property.schema_config_mode_attr %> +<% end -%> +<% unless property.pattern.nil? -%> + pattern: '<%= property.pattern %>' +<% end -%> +<% unless !property.exclude -%> + exclude: <%= property.exclude %> +<% end -%> +<% unless property.__resource.nil? -%> +<% unless property.min_version.name == 'ga'-%> + min_version: '<%= property.min_version.name %>' +<% end -%> +<% end -%> +<% unless property.exact_version.nil? -%> + exact_version: '<%= property.exact_version %>' +<% end -%> +<% unless property.url_param_only.nil? -%> + url_param_only: <%= property.url_param_only %> +<% end -%> +<% unless property.required.nil? -%> + required: <%= property.required %> +<% end -%> +<% unless property.immutable.nil? -%> + immutable: <%= property.immutable %> +<% end -%> +<% unless property.ignore_read.nil? 
-%> + ignore_read: <%= property.ignore_read %> +<% end -%> +<% unless !property.sensitive -%> + sensitive: <%= property.sensitive %> +<% end -%> +<% unless !property.default_from_api -%> + default_from_api: <%= property.default_from_api %> +<% end -%> +<% unless !property.output -%> + output: <%= property.output %> +<% end -%> +<% unless property.send_empty_value.nil? -%> + send_empty_value: <%= property.send_empty_value %> +<% end -%> +<% unless property.allow_empty_object.nil? -%> + allow_empty_object: <%= property.allow_empty_object %> +<% end -%> +<% unless property.read_query_params.nil? -%> + read_query_params: '<%= property.read_query_params %>' +<% end -%> +<% unless property.update_url.nil? -%> + update_url: '<%= property.update_url %>' +<% end -%> +<% unless property.update_verb == property.__resource&.update_verb -%> + update_verb: '<%= property.update_verb.to_s %>' +<% end -%> +<% unless property.update_id.nil? -%> + update_id: '<%= property.update_id %>' +<% end -%> +<% unless property.update_mask_fields.nil? -%> + update_mask_fields: +<% property.update_mask_fields.each do |fname| -%> + - '<%= fname %>' +<% end -%> +<% end -%> +<% unless property.fingerprint_name.nil? -%> + fingerprint_name: '<%= property.fingerprint_name %>' +<% end -%> +<% unless property.conflicts.nil? -%> +<% unless property.conflicts.empty? -%> + conflicts: +<% property.conflicts.each do |fname| -%> + - <%= fname %> +<% end -%> +<% end -%> +<% end -%> +<% unless property.at_least_one_of.nil? -%> +<% unless property.at_least_one_of.empty? -%> + at_least_one_of: +<% property.at_least_one_of.each do |fname| -%> + - '<%= fname %>' +<% end -%> +<% end -%> +<% end -%> +<% unless property.exactly_one_of.nil? -%> +<% unless property.exactly_one_of.empty? -%> + exactly_one_of: +<% property.exactly_one_of.each do |fname| -%> + - '<%= fname %>' +<% end -%> +<% end -%> +<% end -%> +<% unless property.required_with.nil? -%> +<% unless property.required_with.empty? 
-%> + required_with: +<% property.required_with.each do |fname| -%> + - '<%= fname %>' +<% end -%> +<% end -%> +<% end -%> +<% unless property.key_expander == 'tpgresource.ExpandString' -%> + key_expander: '<%= property.key_expander %>' +<% end -%> +<% unless property.key_diff_suppress_func.nil? -%> + key_diff_suppress_func: '<%= property.key_diff_suppress_func %>' +<% end -%> +<% unless property.diff_suppress_func.nil? -%> + diff_suppress_func: '<%= property.diff_suppress_func %>' +<% end -%> +<% unless property.state_func.nil? -%> + state_func: '<%= property.state_func %>' +<% end -%> +<% unless property.set_hash_func.nil? -%> + set_hash_func: '<%= property.set_hash_func %>' +<% end -%> +<% unless property.custom_flatten.nil? -%> + custom_flatten: '<%= property.custom_flatten %>' +<% end -%> +<% unless property.custom_expand.nil? -%> + custom_expand: '<%= property.custom_expand %>' +<% end -%> +<% unless property.flatten_object.nil? -%> + flatten_object: '<%= property.flatten_object %>' +<% end -%> +<% unless property.validation.nil? -%> + validation: +<% unless property.validation.regex.nil? -%> + regex: '<%= property.validation.regex %>' +<% end -%> +<% unless property.validation.function.nil? -%> + function: '<%= property.validation.function %>' +<% end -%> +<% end -%> +<% unless property.default_value.nil? -%> + default_value: <%= property.default_value %> +<% end -%> +<% unless property.deprecation_message.nil? -%> + deprecation_message: '<%= property.deprecation_message %>' +<% end -%> +<% unless property.removed_message.nil? -%> + removed_message: '<%= property.removed_message %>' +<% end -%> +<% if property.is_a?(Api::Type::Array) -%> +<% if property.item_type.is_a?(Api::Type::NestedObject) -%> + item_type: <%= property.item_type.type.to_s %> +<% unless property.item_type.properties.nil? 
-%> + properties: +<% property.item_type.properties.each do |prop| -%> +<%= lines(indent(build_newyaml_field(prop, object, pwd), 4)) -%> +<% end -%> +<% end -%> +<% else -%> + item_type: <%= property.item_type.to_s %> +<% end -%> +<% unless property.min_size.nil? -%> + min_size: <%= property.min_size %> +<% end -%> +<% unless property.max_size.nil? -%> + max_size: <%= property.max_size %> +<% end -%> +<% end -%> +<% if property.is_a?(Api::Type::ResourceRef) -%> +<% unless property.resource.nil? -%> + resource: '<%= property.resource %>' +<% end -%> +<% unless property.imports.nil? -%> + imports: '<%= property.imports.to_s %>' +<% end -%> +<% end -%> +<% if property.is_a?(Api::Type::Enum) -%> +<% unless property.values.nil? -%> + enum_values: +<% property.values.each do |enumval| -%> + - '<%= enumval %>' +<% end -%> +<% end -%> +<% unless property.skip_docs_values.nil? -%> + skip_docs_values: <%= property.skip_docs_values %> +<% end -%> +<% end -%> +<% if property.is_a?(Api::Type::Map) -%> +<% unless property.value_type.nil? -%> + value_type: '<%= property.value_type.to_s %>' +<% end -%> +<% unless property.key_name.nil? -%> + key_name: '<%= property.key_name %>' +<% end -%> +<% unless property.key_description.nil? -%> + key_description: '<%= property.key_description %>' +<% end -%> +<% end -%> +<% if property.is_a?(Api::Type::NestedObject) -%> +<% unless property.properties.nil? -%> + properties: +<% property.properties.each do |prop| -%> +<%= lines(indent(build_newyaml_field(prop, object, pwd), 4)) -%> +<% end -%> +<% end -%> +<% end -%> +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/go.mod b/mmv1/third_party/go.mod new file mode 100644 index 000000000000..8b4fa24a554a --- /dev/null +++ b/mmv1/third_party/go.mod @@ -0,0 +1,2 @@ +// This is an empty go.mod to ensure packages required +// in this folder are not considered part of tpgtools modules. 
\ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt index 21d956030d68..395dc4da4a9c 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_configuration_per_package.kt @@ -15,6 +15,8 @@ import jetbrains.buildServer.configs.kotlin.sharedResources import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot import replaceCharsId +// BuildConfigurationsForPackages accepts a map containing details of multiple packages in a provider and returns a list of build configurations for them all. +// Intended to be used in projects where we're testing all packages, e.g. the nightly test projects fun BuildConfigurationsForPackages(packages: Map>, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration): List { val list = ArrayList() @@ -31,6 +33,13 @@ fun BuildConfigurationsForPackages(packages: Map>, p return list } +// BuildConfigurationForSinglePackage accepts details of a single package in a provider and returns a build configuration for it +// Intended to be used in short-lived projects where we're testing specific packages, e.g. 
feature branch testing +fun BuildConfigurationForSinglePackage(packageName: String, packagePath: String, packageDisplayName: String, providerName: String, parentProjectName: String, vcsRoot: GitVcsRoot, sharedResources: List, environmentVariables: AccTestConfiguration): BuildType{ + val pkg = PackageDetails(packageName, packageDisplayName, providerName, parentProjectName) + return pkg.buildConfiguration(packagePath, vcsRoot, sharedResources, environmentVariables) +} + class PackageDetails(private val packageName: String, private val displayName: String, private val providerName: String, private val parentProjectName: String) { // buildConfiguration returns a BuildType for a service package @@ -102,4 +111,4 @@ class PackageDetails(private val packageName: String, private val displayName: S var id = "%s_%s_PACKAGE_%s".format(this.parentProjectName, this.providerName, this.packageName) return replaceCharsId(id) } -} \ No newline at end of file +} diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt index 88928ed37a23..7641bc858592 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_parameters.kt @@ -252,12 +252,20 @@ fun ParametrizedWithType.readOnlySettings() { } // ParametrizedWithType.terraformCoreBinaryTesting sets environment variables that control what Terraform version is downloaded -// and ensures the testing framework uses that downloaded version -fun ParametrizedWithType.terraformCoreBinaryTesting() { - text("env.TERRAFORM_CORE_VERSION", DefaultTerraformCoreVersion, "The version of Terraform Core which should be used for testing") +// and ensures the testing framework uses that downloaded version. The default Terraform core version is used if no argument is supplied. 
+fun ParametrizedWithType.terraformCoreBinaryTesting(tfVersion: String = DefaultTerraformCoreVersion) { + text("env.TERRAFORM_CORE_VERSION", tfVersion, "The version of Terraform Core which should be used for testing") hiddenVariable("env.TF_ACC_TERRAFORM_PATH", "%system.teamcity.build.checkoutDir%/tools/terraform", "The path where the Terraform Binary is located. Used by the testing framework.") } +// BuildType.overrideTerraformCoreVersion is used to override the value of TERRAFORM_CORE_VERSION in special cases where we're testing new features +// that rely on a specific version of Terraform we might not want to be used for all our tests in TeamCity. +fun BuildType.overrideTerraformCoreVersion(tfVersion: String){ + params { + terraformCoreBinaryTesting(tfVersion) + } +} + fun ParametrizedWithType.terraformShouldPanicForSchemaErrors() { hiddenVariable("env.TF_SCHEMA_PANIC_ON_ERROR", "1", "Panic if unknown/unmatched fields are set into the state") } diff --git a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt index 4faed1046b65..4682f1240eb9 100644 --- a/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt +++ b/mmv1/third_party/terraform/.teamcity/components/builds/build_steps.kt @@ -64,14 +64,15 @@ fun BuildSteps.downloadTerraformBinary() { // https://releases.hashicorp.com/terraform/0.12.28/terraform_0.12.28_linux_amd64.zip val terraformUrl = "https://releases.hashicorp.com/terraform/%env.TERRAFORM_CORE_VERSION%/terraform_%env.TERRAFORM_CORE_VERSION%_linux_amd64.zip" step(ScriptBuildStep { - name = "Download Terraform version %s".format(DefaultTerraformCoreVersion) + name = "Download Terraform" scriptContent = """ #!/bin/bash + echo "Downloading Terraform version %env.TERRAFORM_CORE_VERSION%" mkdir -p tools - wget -O tf.zip %s + wget -O tf.zip $terraformUrl unzip tf.zip mv terraform tools/ - """.format(terraformUrl).trimIndent() + """.trimIndent() }) } @@ 
-118,7 +119,7 @@ fun BuildSteps.runAcceptanceTests() { exit 0 fi - export TEST_COUNT=${'$'}(./test-binary -test.list=%TEST_PREFIX% | wc -l) + export TEST_COUNT=${'$'}(./test-binary -test.list="%TEST_PREFIX%" | wc -l) echo "Found ${'$'}{TEST_COUNT} tests that match the given test prefix %TEST_PREFIX%" if test ${'$'}TEST_COUNT -le "0"; then echo "Skipping test execution; no tests to run" diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/packages.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/packages.kt index 5ea3d2a5d48a..2e4b11ffb516 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/packages.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/packages.kt @@ -13,6 +13,11 @@ var PackagesListGa = mapOf( "displayName" to "Environment Variables", "path" to "./google/envvar" ), + "functions" to mapOf( + "name" to "functions", + "displayName" to "Provider-Defined Functions", + "path" to "./google/functions" + ), "fwmodels" to mapOf( "name" to "fwmodels", "displayName" to "Framework Models", @@ -64,6 +69,11 @@ var PackagesListBeta = mapOf( "displayName" to "Environment Variables", "path" to "./google-beta/envvar" ), + "functions" to mapOf( + "name" to "functions", + "displayName" to "Provider-Defined Functions", + "path" to "./google-beta/functions" + ), "fwmodels" to mapOf( "name" to "fwmodels", "displayName" to "Framework Models", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt index 27550dabce13..443987885a6f 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_beta.kt @@ -48,6 +48,11 @@ var ServicesListBeta = mapOf( "displayName" to "Appengine", "path" to "./google-beta/services/appengine" ), + "apphub" to mapOf( + "name" to "apphub", + "displayName" to "Apphub", + "path" to 
"./google-beta/services/apphub" + ), "artifactregistry" to mapOf( "name" to "artifactregistry", "displayName" to "Artifactregistry", diff --git a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt index 51f98ed08a05..08ce6c2ee82f 100644 --- a/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt +++ b/mmv1/third_party/terraform/.teamcity/components/inputs/services_ga.kt @@ -48,6 +48,11 @@ var ServicesListGa = mapOf( "displayName" to "Appengine", "path" to "./google/services/appengine" ), + "apphub" to mapOf( + "name" to "apphub", + "displayName" to "Apphub", + "path" to "./google/services/apphub" + ), "artifactregistry" to mapOf( "name" to "artifactregistry", "displayName" to "Artifactregistry", diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-provider-functions.kt b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-provider-functions.kt new file mode 100644 index 000000000000..3c1752e227f7 --- /dev/null +++ b/mmv1/third_party/terraform/.teamcity/components/projects/feature_branches/FEATURE-BRANCH-provider-functions.kt @@ -0,0 +1,102 @@ +/* + * Copyright (c) HashiCorp, Inc. 
+ * SPDX-License-Identifier: MPL-2.0 + */ + +// This file is controlled by MMv1, any changes made here will be overwritten + +package projects.feature_branches + +import ProviderNameBeta +import ProviderNameGa +import builds.* +import generated.PackagesListBeta +import generated.PackagesListGa +import jetbrains.buildServer.configs.kotlin.Project +import jetbrains.buildServer.configs.kotlin.vcs.GitVcsRoot +import replaceCharsId +import vcs_roots.ModularMagicianVCSRootBeta +import vcs_roots.ModularMagicianVCSRootGa + +const val featureBranchProviderFunctionsName = "FEATURE-BRANCH-provider-functions" +const val providerFunctionsTfCoreVersion = "1.8.0-alpha20240228" + +// VCS Roots specifically for pulling code from the feature branches in the downstream and upstream repos +object HashicorpVCSRootGa_featureBranchProviderFunctions: GitVcsRoot({ + name = "VCS root for the hashicorp/terraform-provider-${ProviderNameGa} repo @ refs/heads/${featureBranchProviderFunctionsName}" + url = "https://github.com/hashicorp/terraform-provider-${ProviderNameGa}" + branch = "refs/heads/${featureBranchProviderFunctionsName}" + branchSpec = "" // empty as we'll access no other branches +}) + +object HashicorpVCSRootBeta_featureBranchProviderFunctions: GitVcsRoot({ + name = "VCS root for the hashicorp/terraform-provider-${ProviderNameBeta} repo @ refs/heads/${featureBranchProviderFunctionsName}" + url = "https://github.com/hashicorp/terraform-provider-${ProviderNameBeta}" + branch = "refs/heads/${featureBranchProviderFunctionsName}" + branchSpec = "" // empty as we'll access no other branches +}) + +fun featureBranchProviderFunctionSubProject(allConfig: AllContextParameters): Project { + + val projectId = replaceCharsId(featureBranchProviderFunctionsName) + + val packageName = "functions" // This project will contain only builds to test this single package + val sharedResourcesEmpty: List = listOf() // No locking when testing functions + val vcrConfig = 
getVcrAcceptanceTestConfig(allConfig) // Reused below for both MM testing build configs + val trigger = NightlyTriggerConfiguration() // Resued below for running tests against the downstream repos every night. + + var parentId: String // To be overwritten when each build config is generated below. + + // GA + val gaConfig = getGaAcceptanceTestConfig(allConfig) + // How to make only build configuration to the relevant package(s) + val functionPackageGa = PackagesListGa.getValue(packageName) + + // Enable testing using hashicorp/terraform-provider-google + parentId = "${projectId}_HC_GA" + val buildConfigHashiCorpGa = BuildConfigurationForSinglePackage(packageName, functionPackageGa.getValue("path"), "Provider-Defined Functions (GA provider, HashiCorp downstream)", ProviderNameGa, parentId, HashicorpVCSRootGa_featureBranchProviderFunctions, sharedResourcesEmpty, gaConfig) + buildConfigHashiCorpGa.addTrigger(trigger) + + // Enable testing using modular-magician/terraform-provider-google + parentId = "${projectId}_MM_GA" + val buildConfigModularMagicianGa = BuildConfigurationForSinglePackage(packageName, functionPackageGa.getValue("path"), "Provider-Defined Functions (GA provider, MM upstream)", ProviderNameGa, parentId, ModularMagicianVCSRootGa, sharedResourcesEmpty, vcrConfig) + + // Beta + val betaConfig = getBetaAcceptanceTestConfig(allConfig) + val functionPackageBeta = PackagesListBeta.getValue("functions") + + // Enable testing using hashicorp/terraform-provider-google-beta + parentId = "${projectId}_HC_BETA" + val buildConfigHashiCorpBeta = BuildConfigurationForSinglePackage(packageName, functionPackageBeta.getValue("path"), "Provider-Defined Functions (Beta provider, HashiCorp downstream)", ProviderNameBeta, parentId, HashicorpVCSRootBeta_featureBranchProviderFunctions, sharedResourcesEmpty, betaConfig) + buildConfigHashiCorpBeta.addTrigger(trigger) + + // Enable testing using modular-magician/terraform-provider-google-beta + parentId = "${projectId}_MM_BETA" 
+ val buildConfigModularMagicianBeta = BuildConfigurationForSinglePackage(packageName, functionPackageBeta.getValue("path"), "Provider-Defined Functions (Beta provider, MM upstream)", ProviderNameBeta, parentId, ModularMagicianVCSRootBeta, sharedResourcesEmpty, vcrConfig) + + val allBuildConfigs = listOf(buildConfigHashiCorpGa, buildConfigModularMagicianGa, buildConfigHashiCorpBeta, buildConfigModularMagicianBeta) + + // Make these builds use a 1.8.0-ish version of TF core + allBuildConfigs.forEach{ b -> + b.overrideTerraformCoreVersion(providerFunctionsTfCoreVersion) + } + + return Project{ + id(projectId) + name = featureBranchProviderFunctionsName + description = "Subproject for testing feature branch $featureBranchProviderFunctionsName" + + // Register feature branch-specific VCS roots in the project + vcsRoot(HashicorpVCSRootGa_featureBranchProviderFunctions) + vcsRoot(HashicorpVCSRootBeta_featureBranchProviderFunctions) + + // Register all build configs in the project + allBuildConfigs.forEach{ b -> + buildType(b) + } + + params { + readOnlySettings() + } + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt index 0c130da8eca5..a78260dfe650 100644 --- a/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt +++ b/mmv1/third_party/terraform/.teamcity/components/projects/root_project.kt @@ -18,6 +18,7 @@ import generated.ServicesListBeta import generated.ServicesListGa import jetbrains.buildServer.configs.kotlin.Project import jetbrains.buildServer.configs.kotlin.sharedResource +import projects.feature_branches.featureBranchProviderFunctionSubProject // googleCloudRootProject returns a root project that contains a subprojects for the GA and Beta version of the // Google provider. There are also resources to help manage the test projects used for acceptance tests. 
@@ -57,10 +58,14 @@ fun googleCloudRootProject(allConfig: AllContextParameters): Project { } } + // Projects required for nightly testing, testing MM upstreams, and sweepers subProject(googleSubProjectGa(allConfig)) subProject(googleSubProjectBeta(allConfig)) subProject(projectSweeperSubProject(allConfig)) + // Feature branch-testing projects - these will be added and removed as needed + subProject(featureBranchProviderFunctionSubProject(allConfig)) + params { readOnlySettings() } diff --git a/mmv1/third_party/terraform/.teamcity/tests/nightly_tests_project.kt b/mmv1/third_party/terraform/.teamcity/tests/nightly_tests_project.kt index 57764788a5ac..af3d7b8a9b24 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/nightly_tests_project.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/nightly_tests_project.kt @@ -19,26 +19,13 @@ class NightlyTestProjectsTests { val project = googleCloudRootProject(testContextParameters()) // Find GA nightly test project - var gaProject: Project? = project.subProjects.find { p-> p.name == gaProjectName} - if (gaProject == null) { - Assert.fail("Could not find the Google (GA) project") - } - var gaNightlyTestProject: Project? = gaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (gaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } + var gaNightlyTestProject = getSubProject(project, gaProjectName, nightlyTestsProjectName) // Find Beta nightly test project - var betaProject: Project? = project.subProjects.find { p-> p.name == betaProjectName} - if (betaProject == null) { - Assert.fail("Could not find the Google (Beta) project") - } - var betaNightlyTestProject: Project? 
= betaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (betaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } + var betaNightlyTestProject = getSubProject(project, betaProjectName, nightlyTestsProjectName) - (gaNightlyTestProject!!.buildTypes + betaNightlyTestProject!!.buildTypes).forEach{bt -> + // Make assertions about builds in both nightly test projects + (gaNightlyTestProject.buildTypes + betaNightlyTestProject.buildTypes).forEach{bt -> assertTrue("Build configuration `${bt.name}` contains at least one trigger", bt.triggers.items.isNotEmpty()) // Look for at least one CRON trigger var found: Boolean = false diff --git a/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt b/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt index 1603aaeda776..c80b8ef7d003 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/sweepers.kt @@ -18,7 +18,7 @@ import projects.googleCloudRootProject class SweeperTests { @Test - fun projectSweeperProjectDoesNotSkipProjectSweep() { + fun projectSweeperDoesNotSkipProjectSweep() { val project = googleCloudRootProject(testContextParameters()) // Find Project sweeper project @@ -37,30 +37,33 @@ class SweeperTests { } @Test - fun gaNightlyProjectServiceSweeperSkipsProjectSweep() { + fun serviceSweepersSkipProjectSweeper() { val project = googleCloudRootProject(testContextParameters()) // Find GA nightly test project - val gaProject: Project? = project.subProjects.find { p-> p.name == gaProjectName} - if (gaProject == null) { - Assert.fail("Could not find the Google (GA) project") - } - val gaNightlyTestProject: Project? 
= gaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (gaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } + val gaNightlyTestProject = getSubProject(project, gaProjectName, nightlyTestsProjectName) + // Find GA MM Upstream project + val gaMmUpstreamProject = getSubProject(project, gaProjectName, mmUpstreamProjectName) - // Find sweeper inside - val sweeper: BuildType? = gaNightlyTestProject!!.buildTypes.find { p-> p.name == ServiceSweeperName} - if (sweeper == null) { - Assert.fail("Could not find the sweeper build in the Google (GA) Nightly Test project") + // Find Beta nightly test project + val betaNightlyTestProject = getSubProject(project, betaProjectName, nightlyTestsProjectName) + // Find Beta MM Upstream project + val betaMmUpstreamProject = getSubProject(project, betaProjectName, mmUpstreamProjectName) + + val allProjects: ArrayList = arrayListOf(gaNightlyTestProject, gaMmUpstreamProject, betaNightlyTestProject, betaMmUpstreamProject) + allProjects.forEach{ project -> + // Find sweeper inside + val sweeper: BuildType? = project.buildTypes.find { p-> p.name == ServiceSweeperName} + if (sweeper == null) { + Assert.fail("Could not find the sweeper build in the ${project.name} project") + } + + // For the project sweeper to be skipped, SKIP_PROJECT_SWEEPER needs a value + // See https://github.com/GoogleCloudPlatform/magic-modules/blob/501429790939717ca6dce76dbf4b1b82aef4e9d9/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_sweeper.go#L18-L26 + + val value = sweeper!!.params.findRawParam("env.SKIP_PROJECT_SWEEPER")!!.value + assertTrue("env.SKIP_PROJECT_SWEEPER is set to a non-empty string in the sweeper build in the ${project.name} project. This means project sweepers are skipped. 
Value = `${value}` ", value != "") } - - // For the project sweeper to be skipped, SKIP_PROJECT_SWEEPER needs a value - // See https://github.com/GoogleCloudPlatform/magic-modules/blob/501429790939717ca6dce76dbf4b1b82aef4e9d9/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_sweeper.go#L18-L26 - - val value = sweeper!!.params.findRawParam("env.SKIP_PROJECT_SWEEPER")!!.value - assertTrue("env.SKIP_PROJECT_SWEEPER is set to a non-empty string, so project sweepers are skipped. Value = `${value}` ", value != "") } @Test @@ -68,14 +71,8 @@ class SweeperTests { val project = googleCloudRootProject(testContextParameters()) // Find GA nightly test project - val gaProject: Project? = project.subProjects.find { p-> p.name == gaProjectName} - if (gaProject == null) { - Assert.fail("Could not find the Google (GA) project") - } - val gaNightlyTestProject: Project? = gaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (gaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } + val gaNightlyTestProject = getSubProject(project, gaProjectName, nightlyTestsProjectName) + // Find sweeper inside val sweeper: BuildType? = gaNightlyTestProject!!.buildTypes.find { p-> p.name == ServiceSweeperName} @@ -93,41 +90,7 @@ class SweeperTests { val project = googleCloudRootProject(testContextParameters()) // Find Beta nightly test project - val betaProject: Project? = project.subProjects.find { p-> p.name == betaProjectName} - if (betaProject == null) { - Assert.fail("Could not find the Google (GA) project") - } - val betaNightlyTestProject: Project? = betaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (betaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } - - // Find sweeper inside - val sweeper: BuildType? 
= betaNightlyTestProject!!.buildTypes.find { p-> p.name == ServiceSweeperName} - if (sweeper == null) { - Assert.fail("Could not find the sweeper build in the Google (GA) Nightly Test project") - } - - // For the project sweeper to be skipped, SKIP_PROJECT_SWEEPER needs a value - // See https://github.com/GoogleCloudPlatform/magic-modules/blob/501429790939717ca6dce76dbf4b1b82aef4e9d9/mmv1/third_party/terraform/services/resourcemanager/resource_google_project_sweeper.go#L18-L26 - - val value = sweeper!!.params.findRawParam("env.SKIP_PROJECT_SWEEPER")!!.value - assertTrue("env.SKIP_PROJECT_SWEEPER is set to a non-empty string, so project sweepers are skipped. Value = `${value}` ", value != "") - } - - @Test - fun betaNightlyProjectServiceSweeperRunsInGoogleBeta() { - val project = googleCloudRootProject(testContextParameters()) - - // Find Beta nightly test project - val betaProject: Project? = project.subProjects.find { p-> p.name == betaProjectName} - if (betaProject == null) { - Assert.fail("Could not find the Google (GA) project") - } - val betaNightlyTestProject: Project? = betaProject!!.subProjects.find { p-> p.name == nightlyTestsProjectName} - if (betaNightlyTestProject == null) { - Assert.fail("Could not find the Google (GA) Nightly Test project") - } + val betaNightlyTestProject = getSubProject(project, betaProjectName, nightlyTestsProjectName) // Find sweeper inside val sweeper: BuildType? 
= betaNightlyTestProject!!.buildTypes.find { p-> p.name == ServiceSweeperName} diff --git a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt index 747384496441..576e4b735c02 100644 --- a/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt +++ b/mmv1/third_party/terraform/.teamcity/tests/test_utils.kt @@ -8,10 +8,13 @@ package tests import builds.AllContextParameters +import jetbrains.buildServer.configs.kotlin.Project +import org.junit.Assert const val gaProjectName = "Google" const val betaProjectName = "Google Beta" const val nightlyTestsProjectName = "Nightly Tests" +const val mmUpstreamProjectName = "MM Upstream Testing" const val projectSweeperProjectName = "Project Sweeper" fun testContextParameters(): AllContextParameters { @@ -49,4 +52,19 @@ fun testContextParameters(): AllContextParameters { "zone", "infraProject", "vcrBucketName") +} + +fun getSubProject(rootProject: Project, parentProjectName: String, subProjectName: String): Project { + // Find parent project within root + var parentProject: Project? = rootProject.subProjects.find { p-> p.name == parentProjectName} + if (parentProject == null) { + Assert.fail("Could not find the $parentProjectName project") + } + // Find subproject within parent identified above + var subProject: Project? = parentProject!!.subProjects.find { p-> p.name == subProjectName} + if (subProject == null) { + Assert.fail("Could not find the $subProjectName project") + } + + return subProject!! 
} \ No newline at end of file diff --git a/mmv1/third_party/terraform/META.d/_summary.yaml b/mmv1/third_party/terraform/META.d/_summary.yaml index c3dc9c1febb3..cef4b73691f1 100644 --- a/mmv1/third_party/terraform/META.d/_summary.yaml +++ b/mmv1/third_party/terraform/META.d/_summary.yaml @@ -7,6 +7,6 @@ partition: tf-ecosystem summary: owner: team-tf-hybrid-cloud description: | - The Terraform Google provider is a plugin that allows Terraform to manage resources on Google Cloud Platform. + The Terraform provider for Google Cloud is a plugin that allows Terraform to manage resources on Google Cloud. - visibility: external \ No newline at end of file + visibility: external diff --git a/mmv1/third_party/terraform/go.mod.erb b/mmv1/third_party/terraform/go.mod.erb index 0d5943fb44fc..fa30a092113d 100644 --- a/mmv1/third_party/terraform/go.mod.erb +++ b/mmv1/third_party/terraform/go.mod.erb @@ -5,7 +5,7 @@ go 1.20 require ( cloud.google.com/go/bigtable v1.19.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.62.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 @@ -25,21 +25,21 @@ require ( github.com/mitchellh/go-homedir v1.1.0 github.com/mitchellh/hashstructure v1.1.0 github.com/sirupsen/logrus v1.8.1 - golang.org/x/net v0.20.0 - golang.org/x/oauth2 v0.16.0 - google.golang.org/api v0.156.0 - google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 - google.golang.org/grpc v1.60.1 + golang.org/x/net v0.21.0 + golang.org/x/oauth2 v0.17.0 + google.golang.org/api v0.167.0 + google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9 + google.golang.org/grpc v1.61.1 google.golang.org/protobuf v1.32.0 ) require ( bitbucket.org/creachadair/stringset v0.0.8 // indirect - cloud.google.com/go v0.111.0 // indirect - cloud.google.com/go/compute v1.23.3 // indirect + cloud.google.com/go 
v0.112.0 // indirect + cloud.google.com/go/compute v1.23.4 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect - cloud.google.com/go/iam v1.1.5 // indirect - cloud.google.com/go/longrunning v0.5.4 // indirect + cloud.google.com/go/iam v1.1.6 // indirect + cloud.google.com/go/longrunning v0.5.5 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230828082145-3c4c8a2d2371 // indirect github.com/agext/levenshtein v1.2.2 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect @@ -48,7 +48,7 @@ require ( github.com/cespare/xxhash/v2 v2.2.0 // indirect github.com/cloudflare/circl v1.3.3 // indirect github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe // indirect - github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 // indirect + github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 // indirect github.com/envoyproxy/go-control-plane v0.11.1 // indirect github.com/envoyproxy/protoc-gen-validate v1.0.2 // indirect github.com/fatih/color v1.13.0 // indirect @@ -62,9 +62,9 @@ require ( github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932 // indirect github.com/google/s2a-go v0.1.7 // indirect - github.com/google/uuid v1.5.0 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect - github.com/googleapis/gax-go/v2 v2.12.0 // indirect + github.com/googleapis/gax-go/v2 v2.12.1 // indirect github.com/hashicorp/go-checkpoint v0.5.0 // indirect github.com/hashicorp/go-hclog v1.5.0 // indirect github.com/hashicorp/go-plugin v1.6.0 // indirect @@ -91,19 +91,19 @@ require ( github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect github.com/zclconf/go-cty v1.14.1 // indirect go.opencensus.io v0.24.0 // indirect - go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 // indirect - go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 // indirect - go.opentelemetry.io/otel 
v1.21.0 // indirect - go.opentelemetry.io/otel/metric v1.21.0 // indirect - go.opentelemetry.io/otel/trace v1.21.0 // indirect - golang.org/x/crypto v0.18.0 // indirect + go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0 // indirect + go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0 // indirect + go.opentelemetry.io/otel v1.23.0 // indirect + go.opentelemetry.io/otel/metric v1.23.0 // indirect + go.opentelemetry.io/otel/trace v1.23.0 // indirect + golang.org/x/crypto v0.19.0 // indirect golang.org/x/mod v0.14.0 // indirect golang.org/x/sync v0.6.0 // indirect - golang.org/x/sys v0.16.0 // indirect + golang.org/x/sys v0.17.0 // indirect golang.org/x/text v0.14.0 // indirect golang.org/x/time v0.5.0 // indirect google.golang.org/appengine v1.6.8 // indirect - google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917 // indirect - google.golang.org/genproto/googleapis/api v0.0.0-20231212172506-995d672761c0 // indirect + google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 // indirect + google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect ) diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index b265f1fc3590..63d8188cf48c 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -1,18 +1,18 @@ bitbucket.org/creachadair/stringset v0.0.8 h1:gQqe4vs8XWgMyijfyKE6K8o4TcyGGrRXe0JvHgx5H+M= bitbucket.org/creachadair/stringset v0.0.8/go.mod h1:AgthVMyMxC/6FK1KBJ2ALdqkZObGN8hOetgpwXyMn34= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM= -cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU= +cloud.google.com/go v0.112.0 h1:tpFCD7hpHFlQ8yPwT3x+QeXqc2T6+n6T+hmABHfDUSM= +cloud.google.com/go v0.112.0/go.mod 
h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4= cloud.google.com/go/bigtable v1.19.0 h1:wiq9LT0kukfInzvy1joMDijCw/OD1UChpSbORXYn0LI= cloud.google.com/go/bigtable v1.19.0/go.mod h1:xl5kPa8PTkJjdBxg6qdGH88464nNqmbISHSRU+D2yFE= -cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= -cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI= +cloud.google.com/go/compute v1.23.4 h1:EBT9Nw4q3zyE7G45Wvv3MzolIrCJEuHys5muLY0wvAw= +cloud.google.com/go/compute v1.23.4/go.mod h1:/EJMj55asU6kAFnuZET8zqgwgJ9FvXWXOkkfQZa4ioI= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= -cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI= -cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8= -cloud.google.com/go/longrunning v0.5.4 h1:w8xEcbZodnA2BbW6sVirkkoC+1gP8wS57EUUgGS0GVg= -cloud.google.com/go/longrunning v0.5.4/go.mod h1:zqNVncI0BOP8ST6XQD1+VcvuShMmq7+xFSzOL++V0dI= +cloud.google.com/go/iam v1.1.6 h1:bEa06k05IO4f4uJonbB5iAgKTPpABy1ayxaIZV/GHVc= +cloud.google.com/go/iam v1.1.6/go.mod h1:O0zxdPeGBoFdWW3HWmBxJsk0pfvNM/p/qa82rWOGTwI= +cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg= +cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s= dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/GoogleCloudPlatform/declarative-resource-client-library v1.62.0 h1:s4Y6r6RrYLBnqosGXLwR0h1Gqr0VT3wgd6rqvHsD9OE= @@ -43,8 +43,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe h1:QQ3GSy+MqSHxm/d8nCtnAiZdYFd45cYZPs8vOOIYKfk= github.com/cncf/udpa/go 
v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI= github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4 h1:/inchEIKaYC1Akx+H+gqO04wryn5h75LSazbRlnya1k= -github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101 h1:7To3pQ+pZo0i3dsWEbinPNFs5gPSBOsJtx3wTT94VBY= +github.com/cncf/xds/go v0.0.0-20231109132714-523115ebc101/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/creachadair/staticfile v0.1.2/go.mod h1:a3qySzCIXEprDGxk6tSxSI+dBBdLzqeBOMhZ+o2d3pM= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -120,12 +120,12 @@ github.com/google/go-cpy v0.0.0-20211218193943-a9c933c06932/go.mod h1:cC6EdPbj/1 github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o= github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= -github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs= github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0= -github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= -github.com/googleapis/gax-go/v2 v2.12.0/go.mod 
h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= +github.com/googleapis/gax-go/v2 v2.12.1 h1:9F8GV9r9ztXyAi00gsMQHNoF51xPZm8uj1dpYt2ZETM= +github.com/googleapis/gax-go/v2 v2.12.1/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 h1:+9834+KizmvFV7pXQGSXQTsaWhq2GjuNUt0aUU0YBYw= github.com/grpc-ecosystem/go-grpc-middleware v1.3.0/go.mod h1:z0ButlSOZa5vEBq9m2m2hlwIgKw+rp3sdCBRoJY+30Y= github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= @@ -249,17 +249,17 @@ github.com/zclconf/go-cty v1.14.1 h1:t9fyA35fwjjUMcmL5hLER+e/rEPqrbCK1/OSE4SI9KA github.com/zclconf/go-cty v1.14.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1 h1:SpGay3w+nEwMpfVnbqOLH5gY52/foP8RE8UzTZ1pdSE= -go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.46.1/go.mod h1:4UoMYEZOC0yN/sPGH76KPkkU7zgiEWYWL9vwmbnTJPE= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1 h1:aFJWCqJMNjENlcleuuOkGAPH82y0yULBScfXcIEdS24= -go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.46.1/go.mod h1:sEGXWArGqc3tVa+ekntsN65DmVbVeW+7lTKTjZF3/Fo= -go.opentelemetry.io/otel v1.21.0 h1:hzLeKBZEL7Okw2mGzZ0cc4k/A7Fta0uoPgaJCr8fsFc= -go.opentelemetry.io/otel v1.21.0/go.mod h1:QZzNPQPm1zLX4gZK4cMi+71eaorMSGT3A4znnUvNNEo= -go.opentelemetry.io/otel/metric v1.21.0 h1:tlYWfeo+Bocx5kLEloTjbcDwBuELRrIFxwdQ36PlJu4= -go.opentelemetry.io/otel/metric v1.21.0/go.mod h1:o1p3CA8nNHW8j5yuQLdc1eeqEaPfzug24uvsyIEJRWM= -go.opentelemetry.io/otel/sdk v1.19.0 h1:6USY6zH+L8uMH8L3t1enZPR3WFEmSTADlqldyHtJi3o= -go.opentelemetry.io/otel/trace v1.21.0 h1:WD9i5gzvoUPuXIXH24ZNBudiarZDKuekPqi/E8fpfLc= -go.opentelemetry.io/otel/trace v1.21.0/go.mod 
h1:LGbsEB0f9LGjN+OZaQQ26sohbOmiMR+BaslueVtS/qQ= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0 h1:P+/g8GpuJGYbOp2tAdKrIPUX9JO02q8Q0YNlHolpibA= +go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0/go.mod h1:tIKj3DbO8N9Y2xo52og3irLsPI4GW02DSMtrVgNMgxg= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0 h1:doUP+ExOpH3spVTLS0FcWGLnQrPct/hD/bCPbDRUEAU= +go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.48.0/go.mod h1:rdENBZMT2OE6Ne/KLwpiXudnAsbdrdBaqBvTN8M8BgA= +go.opentelemetry.io/otel v1.23.0 h1:Df0pqjqExIywbMCMTxkAwzjLZtRf+bBKLbUcpxO2C9E= +go.opentelemetry.io/otel v1.23.0/go.mod h1:YCycw9ZeKhcJFrb34iVSkyT0iczq/zYDtZYFufObyB0= +go.opentelemetry.io/otel/metric v1.23.0 h1:pazkx7ss4LFVVYSxYew7L5I6qvLXHA0Ap2pwV+9Cnpo= +go.opentelemetry.io/otel/metric v1.23.0/go.mod h1:MqUW2X2a6Q8RN96E2/nqNoT+z9BSms20Jb7Bbp+HiTo= +go.opentelemetry.io/otel/sdk v1.21.0 h1:FTt8qirL1EysG6sTQRZ5TokkU8d0ugCj8htOgThZXQ8= +go.opentelemetry.io/otel/trace v1.23.0 h1:37Ik5Ib7xfYVb4V1UtnT97T1jI+AoIYkJyPkuL4iJgI= +go.opentelemetry.io/otel/trace v1.23.0/go.mod h1:GSGTbIClEsuZrGIzoEHqsVfxgn5UkggkflQwDScNUsk= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= @@ -269,8 +269,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= 
+golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= +golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= @@ -295,11 +295,11 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.20.0 h1:aCL9BSgETF1k+blQaYUBx9hJ9LOGP3gAVemcZlf1Kpo= -golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= +golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= +golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ= -golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o= +golang.org/x/oauth2 v0.17.0 h1:6m3ZPmLEFdVxKKWnKq4VqZ60gutO35zm+zrAHVmHyDQ= +golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -328,14 +328,14 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= 
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= +golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= +golang.org/x/term v0.17.0 h1:mkTF7LCd6WGJNL3K1Ad7kwxNfYAW6a8a8QqtMblp/4U= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -363,8 +363,8 @@ golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8T golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.156.0 h1:yloYcGbBtVYjLKQe4enCunxvwn3s2w/XPrrhVf6MsvQ= -google.golang.org/api v0.156.0/go.mod h1:bUSmn4KFO0Q+69zo9CNIDp4Psi6BqM0np0CbzKRSiSY= 
+google.golang.org/api v0.167.0 h1:CKHrQD1BLRii6xdkatBDXyKzM0mkawt2QP+H3LtPmSE= +google.golang.org/api v0.167.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM= @@ -373,20 +373,20 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917 h1:nz5NESFLZbJGPFxDT/HCn+V1mZ8JGNoY4nUpmW/Y2eg= -google.golang.org/genproto v0.0.0-20240102182953-50ed04b92917/go.mod h1:pZqR+glSb11aJ+JQcczCvgf47+duRuzNSKqE8YAQnV0= -google.golang.org/genproto/googleapis/api v0.0.0-20231212172506-995d672761c0 h1:s1w3X6gQxwrLEpxnLd/qXTVLgQE2yXwaOaoa6IlY/+o= -google.golang.org/genproto/googleapis/api v0.0.0-20231212172506-995d672761c0/go.mod h1:CAny0tYF+0/9rmDB9fahA9YLzX3+AEVl1qXbv5hhj6c= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1 h1:gphdwh0npgs8elJ4T6J+DQJHPVF7RsuJHCfwztUb4J4= -google.golang.org/genproto/googleapis/rpc v0.0.0-20240108191215-35c7eff3a6b1/go.mod h1:daQN87bsDqDoe316QbbvX60nMoJQa4r6Ds0ZuoAe5yA= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 h1:g/4bk7P6TPMkAUbUhquq98xey1slwvuVJPosdBqYJlU= +google.golang.org/genproto v0.0.0-20240205150955-31a09d347014/go.mod h1:xEgQu1e4stdSSsxPDK8Azkrk/ECl5HvdPf6nbZrTS5M= +google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014 h1:x9PwdEgd11LgK+orcck69WVRo7DezSO4VUMPI4xpc8A= 
+google.golang.org/genproto/googleapis/api v0.0.0-20240205150955-31a09d347014/go.mod h1:rbHMSEDyoYX62nRVLOCc4Qt1HbsdytAYoVwgjiOhF3I= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9 h1:hZB7eLIaYlW9qXRfCq/qDaPdbeY3757uARz5Vvfv+cY= +google.golang.org/genproto/googleapis/rpc v0.0.0-20240213162025-012b6fc9bca9/go.mod h1:YUWgXUFRPfoYK1IHMuxH5K6nPEXSCzIMljnQ59lLRCk= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= -google.golang.org/grpc v1.60.1 h1:26+wFr+cNqSGFcOXcabYC0lUVJVRa2Sb2ortSK7VrEU= -google.golang.org/grpc v1.60.1/go.mod h1:OlCHIeLYqSSsLi6i49B5QGdzaMZK9+M7LXN2FKz4eGM= +google.golang.org/grpc v1.61.1 h1:kLAiWrZs7YeDM6MumDe7m3y4aM6wacLzM1Y/wiLP9XY= +google.golang.org/grpc v1.61.1/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= @@ -413,3 +413,5 @@ gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE= 
+github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0 h1:eSOBYPZVnU2fZul9sAJFGLVCgv6stNVKkmsogKF7UeY= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= diff --git a/mmv1/third_party/terraform/provider/provider.go.erb b/mmv1/third_party/terraform/provider/provider.go.erb index 4f7195a37355..c090b8c43096 100644 --- a/mmv1/third_party/terraform/provider/provider.go.erb +++ b/mmv1/third_party/terraform/provider/provider.go.erb @@ -299,12 +299,12 @@ func ProviderConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr // Check if the user provided a value from the universe_domain field other than the default if v, ok := d.GetOk("universe_domain"); ok && v.(string) != "googleapis.com" { if config.UniverseDomain == "" { - return nil, diag.FromErr(fmt.Errorf("Universe domain '%s' supplied directly to Terraform with no matching universe domain in credentials. Credentials with no 'universe_domain' set are assumed to be in the default universe.", v)) + return nil, diag.FromErr(fmt.Errorf("Universe domain mismatch: '%s' supplied directly to Terraform with no matching universe domain in credentials. Credentials with no 'universe_domain' set are assumed to be in the default universe.", v)) } else if v.(string) != config.UniverseDomain { if _, err := os.Stat(config.Credentials); err == nil { - return nil, diag.FromErr(fmt.Errorf("'%s' does not match the universe domain '%s' already set in the credential file '%s'. The 'universe_domain' provider configuration can not be used to override the universe domain that is defined in the active credential. Set the 'universe_domain' provider configuration when universe domain information is not already available in the credential, e.g. 
when authenticating with a JWT token.", v, config.UniverseDomain, config.Credentials)) + return nil, diag.FromErr(fmt.Errorf("Universe domain mismatch: '%s' does not match the universe domain '%s' already set in the credential file '%s'. The 'universe_domain' provider configuration can not be used to override the universe domain that is defined in the active credential. Set the 'universe_domain' provider configuration when universe domain information is not already available in the credential, e.g. when authenticating with a JWT token.", v, config.UniverseDomain, config.Credentials)) } else { - return nil, diag.FromErr(fmt.Errorf("'%s' does not match the universe domain '%s' supplied directly to Terraform. The 'universe_domain' provider configuration can not be used to override the universe domain that is defined in the active credential. Set the 'universe_domain' provider configuration when universe domain information is not already available in the credential, e.g. when authenticating with a JWT token.", v, config.UniverseDomain)) + return nil, diag.FromErr(fmt.Errorf("Universe domain mismatch: '%s' does not match the universe domain '%s' supplied directly to Terraform. The 'universe_domain' provider configuration can not be used to override the universe domain that is defined in the active credential. Set the 'universe_domain' provider configuration when universe domain information is not already available in the credential, e.g. 
when authenticating with a JWT token.", v, config.UniverseDomain)) } } } diff --git a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go index c895e952970e..f345ac3d1968 100644 --- a/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go +++ b/mmv1/third_party/terraform/provider/universe/universe_domain_compute_test.go @@ -57,7 +57,7 @@ func TestAccDefaultUniverseDomain_doesNotMatchExplicit(t *testing.T) { Steps: []resource.TestStep{ resource.TestStep{ Config: testAccUniverseDomain_basic_disk(universeDomainFake), - ExpectError: regexp.MustCompile("supplied directly to Terraform with no matching universe domain in credentials"), + ExpectError: regexp.MustCompile("Universe domain mismatch"), }, }, }) diff --git a/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_type_test.go.erb b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_type_test.go.erb new file mode 100644 index 000000000000..bef24217d444 --- /dev/null +++ b/mmv1/third_party/terraform/services/apigee/resource_apigee_environment_type_test.go.erb @@ -0,0 +1,179 @@ +<% autogen_exception -%> +package apigee_test +<% unless version == 'ga' -%> + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccApigeeEnvironment_apigeeEnvironmentTypeTestExampleUpdate(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + 
CheckDestroy: testAccCheckApigeeEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccApigeeEnvironment_apigeeEnvironmentTypeTestExample(context), + }, + { + ResourceName: "google_apigee_environment.apigee_environment", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"org_id"}, + }, + { + Config: testAccApigeeEnvironment_apigeeEnvironmentTypeTestExampleUpdate(context), + }, + { + ResourceName: "google_apigee_environment.apigee_environment", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"org_id"}, + }, + }, + }) +} + +func testAccApigeeEnvironment_apigeeEnvironmentTypeTestExampleUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + provider = google-beta + + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "apigee" { + provider = google-beta + + project = google_project.project.project_id + service = "apigee.googleapis.com" +} + +resource "google_project_service" "compute" { + provider = google-beta + + project = google_project.project.project_id + service = "compute.googleapis.com" +} + +resource "google_project_service" "servicenetworking" { + provider = google-beta + + project = google_project.project.project_id + service = "servicenetworking.googleapis.com" +} + +resource "google_project_service" "kms" { + provider = google-beta + + project = google_project.project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_compute_network" "apigee_network" { + provider = google-beta + + name = "apigee-network" + project = google_project.project.project_id + depends_on = [google_project_service.compute] +} + +resource "google_compute_global_address" "apigee_range" { + provider = google-beta + + name = "tf-test-apigee-range%{random_suffix}" + purpose = 
"VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 16 + network = google_compute_network.apigee_network.id + project = google_project.project.project_id +} + +resource "google_service_networking_connection" "apigee_vpc_connection" { + provider = google-beta + + network = google_compute_network.apigee_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.apigee_range.name] + depends_on = [google_project_service.servicenetworking] +} + +resource "google_kms_key_ring" "apigee_keyring" { + provider = google-beta + + name = "apigee-keyring" + location = "us-central1" + project = google_project.project.project_id + depends_on = [google_project_service.kms] +} + +resource "google_kms_crypto_key" "apigee_key" { + provider = google-beta + + name = "apigee-key" + key_ring = google_kms_key_ring.apigee_keyring.id +} + +resource "google_project_service_identity" "apigee_sa" { + provider = google-beta + + project = google_project.project.project_id + service = google_project_service.apigee.service +} + +resource "google_kms_crypto_key_iam_binding" "apigee_sa_keyuser" { + provider = google-beta + + crypto_key_id = google_kms_crypto_key.apigee_key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + members = [ + "serviceAccount:${google_project_service_identity.apigee_sa.email}", + ] +} + +resource "google_apigee_organization" "apigee_org" { + provider = google-beta + + analytics_region = "us-central1" + project_id = google_project.project.project_id + authorized_network = google_compute_network.apigee_network.id + billing_type = "PAYG" + runtime_database_encryption_key_name = google_kms_crypto_key.apigee_key.id + + depends_on = [ + google_service_networking_connection.apigee_vpc_connection, + google_project_service.apigee, + google_kms_crypto_key_iam_binding.apigee_sa_keyuser, + ] +} + +resource "google_apigee_environment" "apigee_environment" { + provider = google-beta + + org_id = 
google_apigee_organization.apigee_org.id + name = "tf-test%{random_suffix}" + description = "Apigee Environment" + display_name = "tf-test%{random_suffix}" + type = "INTERMEDIATE" +} +`, context) +} + +<% end -%> diff --git a/mmv1/third_party/terraform/services/appengine/resource_app_engine_service_network_settings_test.go b/mmv1/third_party/terraform/services/appengine/resource_app_engine_service_network_settings_test.go index 6e55010a6df5..35e35974c066 100644 --- a/mmv1/third_party/terraform/services/appengine/resource_app_engine_service_network_settings_test.go +++ b/mmv1/third_party/terraform/services/appengine/resource_app_engine_service_network_settings_test.go @@ -56,7 +56,7 @@ resource "google_app_engine_standard_app_version" "app" { service = "app-%{random_suffix}" delete_service_on_destroy = true - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" } @@ -96,7 +96,7 @@ resource "google_app_engine_standard_app_version" "app" { service = "app-%{random_suffix}" delete_service_on_destroy = true - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" } diff --git a/mmv1/third_party/terraform/services/appengine/resource_app_engine_standard_app_version_test.go b/mmv1/third_party/terraform/services/appengine/resource_app_engine_standard_app_version_test.go index 225c6134f61f..024b3d6a5931 100644 --- a/mmv1/third_party/terraform/services/appengine/resource_app_engine_standard_app_version_test.go +++ b/mmv1/third_party/terraform/services/appengine/resource_app_engine_standard_app_version_test.go @@ -20,7 +20,10 @@ func TestAccAppEngineStandardAppVersion_update(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckAppEngineStandardAppVersionDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + CheckDestroy: 
testAccCheckAppEngineStandardAppVersionDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccAppEngineStandardAppVersion_python(context), @@ -161,11 +164,20 @@ resource "google_project_service" "project" { disable_dependent_services = false } +resource "time_sleep" "wait_60_seconds" { + depends_on = [google_project.my_project] + + create_duration = "60s" +} + resource "google_project_service" "vpcaccess_api" { project = google_project.my_project.project_id service = "vpcaccess.googleapis.com" disable_dependent_services = false + + # Needed for CI tests for permissions to propagate, should not be needed for actual usage + depends_on = [time_sleep.wait_60_seconds] } resource "google_vpc_access_connector" "bar" { diff --git a/mmv1/third_party/terraform/services/apphub/resource_apphub_application_test.go b/mmv1/third_party/terraform/services/apphub/resource_apphub_application_test.go new file mode 100644 index 000000000000..3e68916cc73e --- /dev/null +++ b/mmv1/third_party/terraform/services/apphub/resource_apphub_application_test.go @@ -0,0 +1,210 @@ +package apphub_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccApphubApplication_applicationUpdateFull(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckApphubApplicationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccApphubApplication_applicationFullExample(context), + }, + { + ResourceName: "google_apphub_application.example2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "application_id"}, + }, + { + Config: 
testAccApphubApplication_applicationUpdateDisplayName(context), + }, + { + ResourceName: "google_apphub_application.example2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "application_id"}, + }, + { + Config: testAccApphubApplication_applicationUpdateEnvironment(context), + }, + { + ResourceName: "google_apphub_application.example2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "application_id"}, + }, + { + Config: testAccApphubApplication_applicationUpdateCriticality(context), + }, + { + ResourceName: "google_apphub_application.example2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "application_id"}, + }, + { + Config: testAccApphubApplication_applicationUpdateOwners(context), + }, + { + ResourceName: "google_apphub_application.example2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "application_id"}, + }, + }, + }) +} + +func testAccApphubApplication_applicationUpdateDisplayName(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_apphub_application" "example2" { + location = "us-east1" + application_id = "tf-test-example-application%{random_suffix}" + display_name = "Application Full New%{random_suffix}" + scope { + type = "REGIONAL" + } + attributes { + environment { + type = "STAGING" + } + criticality { + type = "MISSION_CRITICAL" + } + business_owners { + display_name = "Alice%{random_suffix}" + email = "alice@google.com%{random_suffix}" + } + developer_owners { + display_name = "Bob%{random_suffix}" + email = "bob@google.com%{random_suffix}" + } + operator_owners { + display_name = "Charlie%{random_suffix}" + email = "charlie@google.com%{random_suffix}" + } + } +} +`, context) +} + +func testAccApphubApplication_applicationUpdateEnvironment(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource 
"google_apphub_application" "example2" { + location = "us-east1" + application_id = "tf-test-example-application%{random_suffix}" + display_name = "Application Full New%{random_suffix}" + scope { + type = "REGIONAL" + } + attributes { + environment { + type = "TEST" + } + criticality { + type = "MISSION_CRITICAL" + } + business_owners { + display_name = "Alice%{random_suffix}" + email = "alice@google.com%{random_suffix}" + } + developer_owners { + display_name = "Bob%{random_suffix}" + email = "bob@google.com%{random_suffix}" + } + operator_owners { + display_name = "Charlie%{random_suffix}" + email = "charlie@google.com%{random_suffix}" + } + } +} +`, context) +} + +func testAccApphubApplication_applicationUpdateCriticality(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_apphub_application" "example2" { + location = "us-east1" + application_id = "tf-test-example-application%{random_suffix}" + display_name = "Application Full New%{random_suffix}" + scope { + type = "REGIONAL" + } + attributes { + environment { + type = "TEST" + } + criticality { + type = "MEDIUM" + } + business_owners { + display_name = "Alice%{random_suffix}" + email = "alice@google.com%{random_suffix}" + } + developer_owners { + display_name = "Bob%{random_suffix}" + email = "bob@google.com%{random_suffix}" + } + operator_owners { + display_name = "Charlie%{random_suffix}" + email = "charlie@google.com%{random_suffix}" + } + } +} +`, context) +} + +func testAccApphubApplication_applicationUpdateOwners(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_apphub_application" "example2" { + location = "us-east1" + application_id = "tf-test-example-application%{random_suffix}" + display_name = "Application Full New%{random_suffix}" + scope { + type = "REGIONAL" + } + attributes { + environment { + type = "TEST" + } + criticality { + type = "MEDIUM" + } + business_owners { + display_name = "Alice%{random_suffix}" + email = 
"alice@google.com%{random_suffix}" + } + developer_owners { + display_name = "Bob%{random_suffix}" + email = "bob@google.com%{random_suffix}" + } + developer_owners { + display_name = "Derek%{random_suffix}" + email = "derek@google.com%{random_suffix}" + } + operator_owners { + display_name = "Charlie%{random_suffix}" + email = "charlie@google.com%{random_suffix}" + } + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_test.go index a4bd6a1a5dea..f4c23fd09306 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_dataset_test.go @@ -2,6 +2,8 @@ package bigquery_test import ( "fmt" + "regexp" + "strings" "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -326,6 +328,47 @@ func TestAccBigQueryDataset_storageBillModel(t *testing.T) { }) } +func TestAccBigQueryDataset_invalidCharacterInID(t *testing.T) { + t.Parallel() + // Not an acceptance test. + acctest.SkipIfVcr(t) + + datasetID := fmt.Sprintf("tf_test_%s-with-hyphens", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryDatasetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryDataset(datasetID), + ExpectError: regexp.MustCompile("must contain only letters.+numbers.+or underscores.+"), + }, + }, + }) +} + +func TestAccBigQueryDataset_invalidLongID(t *testing.T) { + t.Parallel() + // Not an acceptance test. 
+ acctest.SkipIfVcr(t) + + datasetSuffix := acctest.RandString(t, 10) + datasetID := fmt.Sprintf("tf_test_%s", strings.Repeat(datasetSuffix, 200)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryDatasetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryDataset(datasetID), + ExpectError: regexp.MustCompile(".+cannot be greater than 1,024 characters"), + }, + }, + }) +} + func testAccAddTable(t *testing.T, datasetID string, tableID string) resource.TestCheckFunc { // Not actually a check, but adds a table independently of terraform return func(s *terraform.State) error { diff --git a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go index bd106a6f079b..00f5763b505b 100644 --- a/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go +++ b/mmv1/third_party/terraform/services/bigquery/resource_bigquery_routine_test.go @@ -173,3 +173,171 @@ resource "google_bigquery_routine" "spark_jar" { } `, context) } + +func TestAccBigQueryRoutine_bigQueryRoutineRemoteFunction(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "zip_path": "./test-fixtures/function-source.zip", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigQueryRoutineDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context), + }, + { + ResourceName: "google_bigquery_routine.remote_function_routine", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context), + }, 
+ { + ResourceName: "google_bigquery_routine.remote_function_routine", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "default" { + name = "%{random_suffix}-gcf-source" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.default.name + source = "%{zip_path}" +} + +resource "google_cloudfunctions2_function" "default" { + name = "function-v2-0" + location = "us-central1" + description = "a new function" + + build_config { + runtime = "nodejs18" + entry_point = "helloHttp" + source { + storage_source { + bucket = google_storage_bucket.default.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +resource "google_bigquery_connection" "test" { + connection_id = "tf_test_connection_id%{random_suffix}" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" +} + +resource "google_bigquery_routine" "remote_function_routine" { + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + routine_id = "tf_test_routine_id%{random_suffix}" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = google_cloudfunctions2_function.default.service_config[0].uri + connection = "${google_bigquery_connection.test.name}" + max_batching_rows = "10" + user_defined_context = { + "z": "1.5", + } + } +} +`, context) +} + +func testAccBigQueryRoutine_bigQueryRoutineRemoteFunction_Update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "default" { 
+ name = "%{random_suffix}-gcf-source" + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + name = "function-source.zip" + bucket = google_storage_bucket.default.name + source = "%{zip_path}" +} + +resource "google_cloudfunctions2_function" "default2" { + name = "function-v2-1" + location = "us-central1" + description = "a new new function" + + build_config { + runtime = "nodejs18" + entry_point = "helloHttp" + source { + storage_source { + bucket = google_storage_bucket.default.name + object = google_storage_bucket_object.object.name + } + } + } + + service_config { + max_instance_count = 1 + available_memory = "256M" + timeout_seconds = 60 + } +} + +resource "google_bigquery_connection" "test2" { + connection_id = "tf_test_connection2_id%{random_suffix}" + location = "US" + cloud_resource { } +} + +resource "google_bigquery_dataset" "test" { + dataset_id = "tf_test_dataset_id%{random_suffix}" +} + +resource "google_bigquery_routine" "remote_function_routine" { + dataset_id = "${google_bigquery_dataset.test.dataset_id}" + routine_id = "tf_test_routine_id%{random_suffix}" + routine_type = "SCALAR_FUNCTION" + definition_body = "" + + return_type = "{\"typeKind\" : \"STRING\"}" + + remote_function_options { + endpoint = google_cloudfunctions2_function.default2.service_config[0].uri + connection = "${google_bigquery_connection.test2.name}" + max_batching_rows = "5" + user_defined_context = { + "z": "1.2", + "w": "test", + } + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip b/mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip new file mode 100644 index 000000000000..1cb571888ef5 Binary files /dev/null and b/mmv1/third_party/terraform/services/bigquery/test-fixtures/function-source.zip differ diff --git 
a/mmv1/third_party/terraform/services/certificatemanager/resource_certificate_manager_dns_authorization_upgrade_test.go b/mmv1/third_party/terraform/services/certificatemanager/resource_certificate_manager_dns_authorization_upgrade_test.go new file mode 100644 index 000000000000..293628ee322b --- /dev/null +++ b/mmv1/third_party/terraform/services/certificatemanager/resource_certificate_manager_dns_authorization_upgrade_test.go @@ -0,0 +1,78 @@ +package certificatemanager_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +// Tests schema version migration by creating a dns authorization with an old version of the provider (5.15.0) +// and then updating it with the current version the provider. +func TestAccCertificateManagerDnsAuthorization_migration(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + oldVersion := map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "5.15.0", // a version that doesn't support location yet. 
+ Source: "registry.terraform.io/hashicorp/google", + }, + } + newVersion := map[string]func() (*schema.Provider, error){ + "mynewprovider": func() (*schema.Provider, error) { return acctest.TestAccProviders["google"], nil }, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + CheckDestroy: testAccCheckCertificateManagerDnsAuthorizationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: dnsAuthorizationResourceConfig(name), + ExternalProviders: oldVersion, + }, + { + ResourceName: "google_certificate_manager_dns_authorization.default", + ImportState: true, + ImportStateVerifyIgnore: []string{"location"}, + ExternalProviders: oldVersion, + }, + { + Config: dnsAuthorizationResourceConfigUpdated(name), + ProviderFactories: newVersion, + }, + { + ResourceName: "google_certificate_manager_dns_authorization.default", + ImportState: true, + ImportStateVerifyIgnore: []string{"location"}, + ProviderFactories: newVersion, + }, + }, + }) +} + +func dnsAuthorizationResourceConfig(name string) string { + return fmt.Sprintf(` + resource "google_certificate_manager_dns_authorization" "default" { + name = "%s" + description = "The default dns" + domain = "domain.hashicorptest.com" + } + `, name) +} + +func dnsAuthorizationResourceConfigUpdated(name string) string { + return fmt.Sprintf(` + provider "mynewprovider" {} + + resource "google_certificate_manager_dns_authorization" "default" { + provider = mynewprovider + name = "%s" + description = "The migrated default dns" + domain = "domain.hashicorptest.com" + } + `, name) +} diff --git a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go.erb b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go similarity index 57% rename from mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go.erb rename to 
mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go index 601f55640516..82f1fe3a0e5c 100644 --- a/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go.erb +++ b/mmv1/third_party/terraform/services/clouddeploy/resource_clouddeploy_automation_test.go @@ -1,56 +1,52 @@ -<% autogen_exception -%> package clouddeploy_test - -<% unless version == 'ga' -%> import ( - "testing" + "testing" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-provider-google/google/acctest" - "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" ) func TestAccClouddeployAutomation_update(t *testing.T) { - t.Parallel() + t.Parallel() - context := map[string]interface{}{ - "service_account": envvar.GetTestServiceAccountFromEnv(t), - "random_suffix": acctest.RandString(t, 10), - } + context := map[string]interface{}{ + "service_account": envvar.GetTestServiceAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } - acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, - ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), - CheckDestroy: testAccCheckClouddeployAutomationDestroyProducer(t), - Steps: []resource.TestStep{ - { - Config: testAccClouddeployAutomation_basic(context), - }, - { - ResourceName: "google_clouddeploy_automation.automation", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, - }, - { - Config: testAccClouddeployAutomation_update(context), - }, - { - ResourceName: "google_clouddeploy_automation.automation", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: 
[]string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, - }, - }, - }) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckClouddeployAutomationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccClouddeployAutomation_basic(context), + }, + { + ResourceName: "google_clouddeploy_automation.automation", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccClouddeployAutomation_update(context), + }, + { + ResourceName: "google_clouddeploy_automation.automation", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "delivery_pipeline", "annotations", "labels", "terraform_labels"}, + }, + }, + }) } func testAccClouddeployAutomation_basic(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_clouddeploy_automation" "automation" { - provider = google-beta name = "tf-test-cd-automation%{random_suffix}" location = "us-central1" delivery_pipeline = google_clouddeploy_delivery_pipeline.pipeline.name @@ -71,7 +67,6 @@ resource "google_clouddeploy_automation" "automation" { } resource "google_clouddeploy_delivery_pipeline" "pipeline" { - provider = google-beta name = "tf-test-cd-pipeline%{random_suffix}" location = "us-central1" serial_pipeline { @@ -85,10 +80,9 @@ resource "google_clouddeploy_delivery_pipeline" "pipeline" { } func testAccClouddeployAutomation_update(context map[string]interface{}) string { - return acctest.Nprintf(` + return acctest.Nprintf(` resource "google_clouddeploy_automation" "automation" { - provider = google-beta name = "tf-test-cd-automation%{random_suffix}" location = "us-central1" delivery_pipeline = 
google_clouddeploy_delivery_pipeline.pipeline.name @@ -129,7 +123,6 @@ resource "google_clouddeploy_automation" "automation" { } resource "google_clouddeploy_delivery_pipeline" "pipeline" { - provider = google-beta name = "tf-test-cd-pipeline%{random_suffix}" location = "us-central1" serial_pipeline { @@ -141,5 +134,3 @@ resource "google_clouddeploy_delivery_pipeline" "pipeline" { } `, context) } -<% end -%> - diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go index b6d3d77906e2..4c3c1489dd66 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function.go @@ -201,7 +201,7 @@ func ResourceCloudFunctionsFunction() *schema.Resource { Type: schema.TypeString, Optional: true, Computed: true, - Description: `Docker Registry to use for storing the function's Docker images. Allowed values are CONTAINER_REGISTRY (default) and ARTIFACT_REGISTRY.`, + Description: `Docker Registry to use for storing the function's Docker images. 
Allowed values are ARTIFACT_REGISTRY (default) and CONTAINER_REGISTRY.`, }, "docker_repository": { diff --git a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.erb b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.erb index 61a267c0fcd8..6288ae43d97b 100644 --- a/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.erb +++ b/mmv1/third_party/terraform/services/cloudfunctions/resource_cloudfunctions_function_test.go.erb @@ -52,7 +52,7 @@ func TestAccCloudFunctionsFunction_basic(t *testing.T) { resource.TestCheckResourceAttr(funcResourceName, "description", "test function"), resource.TestCheckResourceAttr(funcResourceName, - "docker_registry", "CONTAINER_REGISTRY"), + "docker_registry", "ARTIFACT_REGISTRY"), resource.TestCheckResourceAttr(funcResourceName, "available_memory_mb", "128"), resource.TestCheckResourceAttr(funcResourceName, @@ -659,7 +659,7 @@ resource "google_cloudfunctions_function" "function" { name = "%s" runtime = "nodejs10" description = "test function" - docker_registry = "CONTAINER_REGISTRY" + docker_registry = "ARTIFACT_REGISTRY" available_memory_mb = 128 source_archive_bucket = google_storage_bucket.bucket.name source_archive_object = google_storage_bucket_object.archive.name @@ -762,7 +762,7 @@ resource "google_cloudfunctions_function" "function" { name = "%[3]s" runtime = "nodejs10" description = "test function" - docker_registry = "CONTAINER_REGISTRY" + docker_registry = "ARTIFACT_REGISTRY" available_memory_mb = 128 source_archive_bucket = google_storage_bucket.bucket.name source_archive_object = google_storage_bucket_object.archive.name diff --git a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.erb b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.erb index f74b09be0e2f..bacaa401b2d6 100644 --- 
a/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.erb +++ b/mmv1/third_party/terraform/services/cloudrunv2/resource_cloud_run_v2_service_test.go.erb @@ -794,6 +794,107 @@ func TestAccCloudRunV2Service_cloudrunv2ServiceAttributionLabel(t *testing.T) { }) } +<% unless version == 'ga' -%> +func TestAccCloudRunV2Service_cloudrunv2ServiceWithServiceMinInstances(t *testing.T) { + t.Parallel() + context := map[string]interface{} { + "random_suffix" : acctest.RandString(t, 10), + } + acctest.VcrTest(t, resource.TestCase { + PreCheck: func() { acctest.AccTestPreCheck(t)}, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckCloudRunV2ServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithMinInstances(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage"}, + }, + { + Config: testAccCloudRunV2Service_cloudrunv2ServiceWithNoMinInstances(context), + }, + { + ResourceName: "google_cloud_run_v2_service.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "location", "annotations", "labels", "terraform_labels", "launch_stage"}, + }, + + }, + }) +} + +func testAccCloudRunV2Service_cloudrunv2ServiceWithNoMinInstances(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "description creating" + location = "us-central1" + launch_stage = "BETA" + annotations = { + generated-by = "magic-modules" + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + template { + containers { + name = "container-1" + image = 
"us-docker.pkg.dev/cloudrun/container/hello" + } + } + lifecycle { + ignore_changes = [ + launch_stage, + ] + } +} + +`, context) +} +func testAccCloudRunV2Service_cloudrunv2ServiceWithMinInstances(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_cloud_run_v2_service" "default" { + name = "tf-test-cloudrun-service%{random_suffix}" + description = "description creating" + location = "us-central1" + launch_stage = "BETA" + annotations = { + generated-by = "magic-modules" + } + ingress = "INGRESS_TRAFFIC_ALL" + labels = { + label-1 = "value-1" + } + client = "client-1" + client_version = "client-version-1" + scaling { + min_instance_count = 1 + } + template { + containers { + name = "container-1" + image = "us-docker.pkg.dev/cloudrun/container/hello" + } + } + lifecycle { + ignore_changes = [ + launch_stage, + ] + } +} + +`, context) +} +<% end -%> + func testAccCloudRunV2Service_cloudrunv2ServiceWithAttributionLabel(context map[string]interface{}) string { return acctest.Nprintf(` provider "google" { diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.erb b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.erb index 536af7709b4b..e192b510fa65 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.erb +++ b/mmv1/third_party/terraform/services/composer/resource_composer_environment.go.erb @@ -7,6 +7,7 @@ import ( "regexp" "strings" "time" + "context" "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -168,6 +169,12 @@ func ResourceComposerEnvironment() *schema.Resource { tpgresource.DefaultProviderProject, tpgresource.DefaultProviderRegion, tpgresource.SetLabelsDiff, +<% unless version == "ga" -%> + customdiff.ForceNewIf("config.0.node_config.0.network", forceNewCustomDiff("config.0.node_config.0.network")), + customdiff.ForceNewIf("config.0.node_config.0.subnetwork", 
forceNewCustomDiff("config.0.node_config.0.subnetwork")), + customdiff.ValidateChange("config.0.software_config.0.image_version", imageVersionChangeValidationFunc), + versionValidationCustomizeDiffFunc, +<% end -%> ), Schema: map[string]*schema.Schema{ @@ -237,17 +244,37 @@ func ResourceComposerEnvironment() *schema.Resource { Type: schema.TypeString, Computed: true, Optional: true, +<% if version == "ga" -%> ForceNew: true, +<% else -%> + ForceNew: false, + ConflictsWith: []string{"config.0.node_config.0.composer_network_attachment"}, +<% end -%> DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, Description: `The Compute Engine machine type used for cluster instances, specified as a name or relative resource name. For example: "projects/{project}/zones/{zone}/machineTypes/{machineType}". Must belong to the enclosing environment's project and region/zone. The network must belong to the environment's project. If unspecified, the "default" network ID in the environment's project is used. If a Custom Subnet Network is provided, subnetwork must also be provided.`, }, "subnetwork": { Type: schema.TypeString, Optional: true, +<% if version == "ga" -%> ForceNew: true, +<% else -%> + ForceNew: false, + Computed: true, + ConflictsWith: []string{"config.0.node_config.0.composer_network_attachment"}, +<% end -%> DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, - Description: `The Compute Engine subnetwork to be used for machine communications, , specified as a self-link, relative resource name (e.g. "projects/{project}/regions/{region}/subnetworks/{subnetwork}"), or by name. If subnetwork is provided, network must also be provided and the subnetwork must belong to the enclosing environment's project and region.`, + Description: `The Compute Engine subnetwork to be used for machine communications, specified as a self-link, relative resource name (e.g. "projects/{project}/regions/{region}/subnetworks/{subnetwork}"), or by name. 
If subnetwork is provided, network must also be provided and the subnetwork must belong to the enclosing environment's project and region.`, }, +<% unless version == "ga" -%> + "composer_network_attachment": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: false, + Description: `PSC (Private Service Connect) Network entry point. Customers can pre-create the Network Attachment and point Cloud Composer environment to use. It is possible to share network attachment among many environments, provided enough IP addresses are available.`, + }, +<% end -%> "disk_size_gb": { Type: schema.TypeInt, Computed: true, @@ -1190,6 +1217,68 @@ func resourceComposerEnvironmentUpdate(d *schema.ResourceData, meta interface{}) return err } +<% unless version == "ga" -%> + noChangeErrorMessage := "Update request does not result in any change to the environment's configuration" + if d.HasChange("config.0.node_config.0.network") || d.HasChange("config.0.node_config.0.subnetwork"){ + // step 1: update with empty network and subnetwork + patchObjEmpty := &composer.Environment{ + Config: &composer.EnvironmentConfig{ + NodeConfig: &composer.NodeConfig{}, + }, + } + err = resourceComposerEnvironmentPatchField("config.nodeConfig.network,config.nodeConfig.subnetwork", userAgent, patchObjEmpty, d, tfConfig) + if err != nil && !strings.Contains(err.Error(), noChangeErrorMessage){ + return err + } + + // step 2: update with new network and subnetwork, if new values are not empty + if (config.NodeConfig.Network != "" && config.NodeConfig.Subnetwork != ""){ + patchObj := &composer.Environment{ + Config: &composer.EnvironmentConfig{ + NodeConfig: &composer.NodeConfig{}, + }, + } + if config != nil && config.NodeConfig != nil { + patchObj.Config.NodeConfig.Network = config.NodeConfig.Network + patchObj.Config.NodeConfig.Subnetwork = config.NodeConfig.Subnetwork + } + err = resourceComposerEnvironmentPatchField("config.nodeConfig.network,config.nodeConfig.subnetwork", 
userAgent, patchObj, d, tfConfig) + if err != nil { + return err + } + } + } + + if d.HasChange("config.0.node_config.0.composer_network_attachment") { + // step 1: update with empty composer_network_attachment + patchObjEmpty := &composer.Environment{ + Config: &composer.EnvironmentConfig{ + NodeConfig: &composer.NodeConfig{}, + }, + } + err = resourceComposerEnvironmentPatchField("config.nodeConfig.composerNetworkAttachment", userAgent, patchObjEmpty, d, tfConfig) + if err != nil && !strings.Contains(err.Error(), noChangeErrorMessage){ + return err + } + + // step 2: update with new composer_network_attachment + if (config.NodeConfig.ComposerNetworkAttachment != ""){ + patchObj := &composer.Environment{ + Config: &composer.EnvironmentConfig{ + NodeConfig: &composer.NodeConfig{}, + }, + } + if config != nil && config.NodeConfig != nil { + patchObj.Config.NodeConfig.ComposerNetworkAttachment = config.NodeConfig.ComposerNetworkAttachment + } + err = resourceComposerEnvironmentPatchField("config.nodeConfig.composerNetworkAttachment", userAgent, patchObj, d, tfConfig) + if err != nil { + return err + } + } + } +<% end -%> + <% unless version == "ga" -%> if d.HasChange("config.0.software_config.0.image_version") { patchObj := &composer.Environment{ @@ -1592,10 +1681,15 @@ func flattenComposerEnvironmentConfig(envCfg *composer.EnvironmentConfig) interf transformed["airflow_uri"] = envCfg.AirflowUri transformed["node_config"] = flattenComposerEnvironmentConfigNodeConfig(envCfg.NodeConfig) transformed["software_config"] = flattenComposerEnvironmentConfigSoftwareConfig(envCfg.SoftwareConfig) - transformed["private_environment_config"] = flattenComposerEnvironmentConfigPrivateEnvironmentConfig(envCfg.PrivateEnvironmentConfig) + imageVersion := envCfg.SoftwareConfig.ImageVersion + if !isComposer3(imageVersion){ + transformed["private_environment_config"] = flattenComposerEnvironmentConfigPrivateEnvironmentConfig(envCfg.PrivateEnvironmentConfig) + } <% unless version == "ga" 
-%> - transformed["enable_private_environment"] = envCfg.PrivateEnvironmentConfig.EnablePrivateEnvironment - transformed["enable_private_builds_only"] = envCfg.PrivateEnvironmentConfig.EnablePrivateBuildsOnly + if isComposer3(imageVersion) && envCfg.PrivateEnvironmentConfig != nil { + transformed["enable_private_environment"] = envCfg.PrivateEnvironmentConfig.EnablePrivateEnvironment + transformed["enable_private_builds_only"] = envCfg.PrivateEnvironmentConfig.EnablePrivateBuildsOnly + } <% end -%> transformed["web_server_network_access_control"] = flattenComposerEnvironmentConfigWebServerNetworkAccessControl(envCfg.WebServerNetworkAccessControl) transformed["database_config"] = flattenComposerEnvironmentConfigDatabaseConfig(envCfg.DatabaseConfig) @@ -1803,7 +1897,9 @@ func flattenComposerEnvironmentConfigWorkloadsConfig(workloadsConfig *composer.W transformed["web_server"] = []interface{}{transformedWebServer} transformed["worker"] = []interface{}{transformedWorker} <% unless version == "ga" -%> - transformed["dag_processor"] = []interface{}{transformedDagProcessor} + if transformedDagProcessor != nil { + transformed["dag_processor"] = []interface{}{transformedDagProcessor} + } <% end -%> @@ -1841,6 +1937,9 @@ func flattenComposerEnvironmentConfigNodeConfig(nodeCfg *composer.NodeConfig) in transformed["machine_type"] = nodeCfg.MachineType transformed["network"] = nodeCfg.Network transformed["subnetwork"] = nodeCfg.Subnetwork +<% unless version == "ga" -%> + transformed["composer_network_attachment"] = nodeCfg.ComposerNetworkAttachment +<% end -%> transformed["disk_size_gb"] = nodeCfg.DiskSizeGb transformed["service_account"] = nodeCfg.ServiceAccount transformed["oauth_scopes"] = flattenComposerEnvironmentConfigNodeConfigOauthScopes(nodeCfg.OauthScopes) @@ -1982,7 +2081,8 @@ func expandComposerEnvironmentConfig(v interface{}, d *schema.ResourceData, conf composer.PrivateEnvironmentConfig.EnablePrivateEnvironment in API. 
Check image version to avoid overriding EnablePrivateEnvironment in case of other versions. */ - if isComposer3(d, config) { + imageVersion := d.Get("config.0.software_config.0.image_version").(string) + if isComposer3(imageVersion) { transformed.PrivateEnvironmentConfig = &composer.PrivateEnvironmentConfig{} if enablePrivateEnvironmentRaw, ok := original["enable_private_environment"]; ok { transformed.PrivateEnvironmentConfig.EnablePrivateEnvironment = enablePrivateEnvironmentRaw.(bool) @@ -2457,6 +2557,13 @@ func expandComposerEnvironmentConfigNodeConfig(v interface{}, d *schema.Resource } transformed.Subnetwork = transformedSubnetwork } + +<% unless version == "ga" -%> + if v, ok := original["composer_network_attachment"]; ok { + transformed.ComposerNetworkAttachment = v.(string) + } +<% end -%> + transformedIPAllocationPolicy, err := expandComposerEnvironmentIPAllocationPolicy(original["ip_allocation_policy"], d, config) if err != nil { return nil, err @@ -2934,7 +3041,59 @@ func versionsEqual(old, new string) (bool, error) { return o.Equal(n), nil } -func isComposer3(d *schema.ResourceData, config *transport_tpg.Config) bool { - image_version := d.Get("config.0.software_config.0.image_version").(string) - return strings.Contains(image_version, "composer-3") +func isComposer3(imageVersion string) bool { + return strings.Contains(imageVersion, "composer-3") } + +func forceNewCustomDiff(key string) customdiff.ResourceConditionFunc { + return func(ctx context.Context, d *schema.ResourceDiff, meta interface{}) bool { + old, new := d.GetChange(key) + imageVersion := d.Get("config.0.software_config.0.image_version").(string) + if isComposer3(imageVersion) || tpgresource.CompareSelfLinkRelativePaths("", old.(string), new.(string), nil) { + return false + } + return true + } +} + +func imageVersionChangeValidationFunc(ctx context.Context, old, new, meta any) error { + if old.(string) != "" && !isComposer3(old.(string)) && isComposer3(new.(string)) { + return 
fmt.Errorf("upgrade to composer 3 is not yet supported") + } + return nil +} + +func validateComposer3FieldUsage(d *schema.ResourceDiff, key string, requireComposer3 bool) error { + _, ok := d.GetOk(key) + imageVersion := d.Get("config.0.software_config.0.image_version").(string) + if ok && ( isComposer3(imageVersion) != requireComposer3 ) { + if requireComposer3 { + return fmt.Errorf("error in configuration, %s should only be used in Composer 3", key) + } else { + return fmt.Errorf("error in configuration, %s should not be used in Composer 3", key) + } + } + return nil +} + +func versionValidationCustomizeDiffFunc(ctx context.Context, d *schema.ResourceDiff, meta interface{}) error { + composer3FieldUsagePolicy := map[string]bool{ + "config.0.node_config.0.max_pods_per_node": false, // not allowed in composer 3 + "config.0.node_config.0.enable_ip_masq_agent": false, + "config.0.node_config.0.config.0.node_config.0.ip_allocation_policy": false, + "config.0.private_environment_config": false, + "config.0.master_authorized_networks_config": false, + "config.0.node_config.0.composer_network_attachment": true, // allowed only in composer 3 + "config.0.node_config.0.composer_internal_ipv4_cidr_block": true, + "config.0.software_config.0.web_server_plugins_mode": true, + "config.0.enable_private_environment": true, + "config.0.enable_private_builds_only": true, + "config.0.workloads_config.0.dag_processor": true, + } + for key, allowed := range composer3FieldUsagePolicy { + if err := validateComposer3FieldUsage(d, key, allowed); err != nil { + return err + } + } + return nil +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.erb b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.erb index 80e870eac07a..bde25f487b1f 100644 --- a/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.erb +++ 
b/mmv1/third_party/terraform/services/composer/resource_composer_environment_test.go.erb @@ -21,6 +21,7 @@ import ( const testComposerEnvironmentPrefix = "tf-test-composer-env" const testComposerNetworkPrefix = "tf-test-composer-net" const testComposerBucketPrefix = "tf-test-composer-bucket" +const testComposerNetworkAttachmentPrefix = "tf-test-composer-nta" func allComposerServiceAgents() []string { return []string{ @@ -1186,6 +1187,147 @@ func TestAccComposerEnvironmentComposer3_update(t *testing.T) { }) } +func TestAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(envName, networkAttachment, network, subnetwork), + ExpectError: regexp.MustCompile("Conflicting configuration arguments"), + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + ExpectNonEmptyPlan: true, + Config: testAccComposerEnvironmentComposer3_basic(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_withNetworkAttachment(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_updateWithNetworkAttachment(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_updateWithNetworkAndSubnetwork(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + networkAttachment := fmt.Sprintf("%s-%d", testComposerNetworkAttachmentPrefix, acctest.RandInt(t)) + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachment) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, networkAttachment, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(envName, networkAttachment, network, subnetwork), + }, + { + ResourceName: "google_composer_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. 
+ { + PlanOnly: true, + Config: testAccComposerEnvironmentComposer3_withNetworkAttachment(envName, fullFormNetworkAttachmentName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + ExpectNonEmptyPlan: true, + }, + }, + }) +} + // Checks Composer 3 specific updatable fields. func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { t.Parallel() @@ -1195,7 +1337,7 @@ func TestAccComposerEnvironmentComposer3_updateToEmpty(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1232,7 +1374,7 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { subnetwork := network + "-1" acctest.VcrTest(t, resource.TestCase{ - PreCheck: func() { acctest.AccTestPreCheck(t) }, + PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), Steps: []resource.TestStep{ @@ -1259,6 +1401,77 @@ func TestAccComposerEnvironmentComposer3_updateFromEmpty(t *testing.T) { }, }) } + +func TestAccComposerEnvironmentComposer3_upgrade_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + network := fmt.Sprintf("%s-%d", testComposerNetworkPrefix, acctest.RandInt(t)) + subnetwork := network + "-1" + errorRegExp, _ := regexp.Compile(".*upgrade to composer 3 is not yet supported.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + }, + { + Config: testAccComposerEnvironmentComposer3_empty(envName, network, subnetwork), + ExpectError: errorRegExp, + }, + // This is a terrible clean-up step in order to get destroy to succeed, + // due to dangling firewall rules left by the Composer Environment blocking network deletion. + // TODO: Remove this check if firewall rules bug gets fixed by Composer. + { + PlanOnly: true, + ExpectNonEmptyPlan: false, + Config: testAccComposerEnvironmentComposer2_empty(envName, network, subnetwork), + Check: testAccCheckClearComposerEnvironmentFirewalls(t, network), + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer2_usesUnsupportedField_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + errorRegExp, _ := regexp.Compile(".*error in configuration, .* should only be used in Composer 3.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer2_usesUnsupportedField(envName), + ExpectError: errorRegExp, + }, + }, + }) +} + +func TestAccComposerEnvironmentComposer3_usesUnsupportedField_expectError(t *testing.T) { + t.Parallel() + + envName := fmt.Sprintf("%s-%d", testComposerEnvironmentPrefix, acctest.RandInt(t)) + errorRegExp, _ := regexp.Compile(".*error in configuration, .* should not be used in Composer 3.*") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccComposerEnvironmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComposerEnvironmentComposer3_usesUnsupportedField(envName), + ExpectError: errorRegExp, + }, 
+ }, + }) +} <% end -%> func testAccComposerEnvironment_customBucket(bucketName, envName, network, subnetwork string) string { @@ -2835,6 +3048,34 @@ resource "google_project_iam_member" "composer-worker" { } <% unless version == "ga" -%> +func testAccComposerEnvironmentComposer2_empty(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + software_config { + image_version = "composer-2-airflow-2" + } + } +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, network, subnetwork) +} + func testAccComposerEnvironmentComposer3_empty(name, network, subnetwork string) string { return fmt.Sprintf(` resource "google_composer_environment" "test" { @@ -2844,6 +3085,10 @@ resource "google_composer_environment" "test" { software_config { image_version = "composer-3-airflow-2" } + node_config { + network = google_compute_network.test.id + subnetwork = google_compute_subnetwork.test.id + } } } @@ -2863,17 +3108,51 @@ resource "google_compute_subnetwork" "test" { `, name, network, subnetwork) } -func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork string) string { - return fmt.Sprintf(` +func testAccComposerEnvironmentComposer2_usesUnsupportedField(name string) string { +return fmt.Sprintf(` resource "google_composer_environment" "test" { name = "%s" region = "us-central1" config { + software_config { + image_version = "composer-2-airflow-2" + web_server_plugins_mode = "ENABLED" + } + } +} +`, name) +} + +func testAccComposerEnvironmentComposer3_usesUnsupportedField(name string) string { +return 
fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + enable_ip_masq_agent = true + } software_config { image_version = "composer-3-airflow-2" } + } +} +`, name) +} + +func testAccComposerEnvironmentComposer3_basic(name, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { node_config { composer_internal_ipv4_cidr_block = "100.64.128.0/20" + network = google_compute_network.test.id + subnetwork = google_compute_subnetwork.test.id + } + software_config { + image_version = "composer-3-airflow-2" } workloads_config { dag_processor { @@ -2910,6 +3189,8 @@ resource "google_composer_environment" "test" { region = "us-central1" config { node_config { + network = google_compute_network.test_1.id + subnetwork = google_compute_subnetwork.test_1.id composer_internal_ipv4_cidr_block = "100.64.128.0/20" } software_config { @@ -2941,7 +3222,168 @@ resource "google_compute_subnetwork" "test" { region = "us-central1" network = google_compute_network.test.self_link } -`, name, network, subnetwork) + +resource "google_compute_network" "test_1" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test_1" { + name = "%s" + ip_cidr_range = "10.3.0.0/16" + region = "us-central1" + network = google_compute_network.test_1.self_link +} +`, name, network, subnetwork, network + "-update", subnetwork + "update") +} + +func testAccComposerEnvironmentComposer3_withNetworkAttachment(name, networkAttachment, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + composer_network_attachment = google_compute_network_attachment.test.id + } + software_config { + image_version = "composer-3-airflow-2" + } + } +} + +resource "google_compute_network_attachment" 
"test" { + name = "%s" + region = "us-central1" + subnetworks = [ google_compute_subnetwork.test-att.id ] + connection_preference = "ACCEPT_MANUAL" + // Composer 3 is modifying producer_accept_lists outside terraform, ignoring this change for now + lifecycle { + ignore_changes = [producer_accept_lists] + } +} + +resource "google_compute_network" "test-att" { + name = "%s-att" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test-att" { + name = "%s-att" + ip_cidr_range = "10.3.0.0/16" + region = "us-central1" + network = google_compute_network.test-att.self_link +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, networkAttachment, network, subnetwork, network, subnetwork) +} + +func testAccComposerEnvironmentComposer3_withNetworkAndSubnetwork(name, networkAttachment, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.id + subnetwork = google_compute_subnetwork.test.id + } + software_config { + image_version = "composer-3-airflow-2" + } + } +} + +resource "google_compute_network_attachment" "test" { + name = "%s" + region = "us-central1" + subnetworks = [ google_compute_subnetwork.test-att.id ] + connection_preference = "ACCEPT_MANUAL" + // Composer 3 is modifying producer_accept_lists outside terraform, ignoring this change for now + lifecycle { + ignore_changes = [producer_accept_lists] + } +} + +resource "google_compute_network" "test-att" { + name = "%s-att" + auto_create_subnetworks = false +} + +resource 
"google_compute_subnetwork" "test-att" { + name = "%s-att" + ip_cidr_range = "10.3.0.0/16" + region = "us-central1" + network = google_compute_network.test-att.self_link +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, networkAttachment, network, subnetwork, network, subnetwork) +} + +func testAccComposerEnvironmentComposer3_withNetworkSubnetworkAndAttachment_expectError(name, networkAttachment, network, subnetwork string) string { + return fmt.Sprintf(` +resource "google_composer_environment" "test" { + name = "%s" + region = "us-central1" + config { + node_config { + network = google_compute_network.test.id + subnetwork = google_compute_subnetwork.test.id + composer_network_attachment = google_compute_network_attachment.test.id + } + software_config { + image_version = "composer-3-airflow-2" + } + } +} + +resource "google_compute_network_attachment" "test" { + name = "%s" + region = "us-central1" + subnetworks = [ google_compute_subnetwork.test.id ] + connection_preference = "ACCEPT_MANUAL" +} + +// use a separate network to avoid conflicts with other tests running in parallel +// that use the default network/subnet +resource "google_compute_network" "test" { + name = "%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "test" { + name = "%s" + ip_cidr_range = "10.2.0.0/16" + region = "us-central1" + network = google_compute_network.test.self_link +} +`, name, networkAttachment, network, subnetwork) } <% end -%> diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb 
b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb index d61f2457be99..97baae34251f 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager.go.erb @@ -305,6 +305,13 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { Description: `The instance lifecycle policy for this managed instance group.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ + "default_action_on_failure": { + Type: schema.TypeString, + Default: "REPAIR", + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"REPAIR", "DO_NOTHING"}, true), + Description: `Default behavior for all instance or health check failures.`, + }, "force_update_on_repair": { Type: schema.TypeString, Default: "NO", @@ -316,7 +323,6 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, - <% unless version == "ga" -%> "all_instances_config": { Type: schema.TypeList, Optional: true, @@ -341,7 +347,6 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, }, - <% end -%> "wait_for_instances": { Type: schema.TypeBool, Optional: true, @@ -353,12 +358,7 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { Optional: true, Default: "STABLE", ValidateFunc: validation.StringInSlice([]string{"STABLE", "UPDATED"}, false), - - <% if version == "ga" -%> - Description: `When used with wait_for_instances specifies the status to wait for. When STABLE is specified this resource will wait until the instances are stable before returning. When UPDATED is set, it will wait for the version target to be reached and any per instance configs to be effective as well as all instances to be stable before returning.`, - <% else -%> Description: `When used with wait_for_instances specifies the status to wait for. 
When STABLE is specified this resource will wait until the instances are stable before returning. When UPDATED is set, it will wait for the version target to be reached and any per instance configs to be effective and all instances configs to be effective as well as all instances to be stable before returning.`, - <% end -%> }, "stateful_internal_ip": { Type: schema.TypeList, @@ -454,7 +454,6 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, }, - <% unless version == "ga" -%> "all_instances_config": { Type: schema.TypeList, Computed: true, @@ -469,7 +468,6 @@ func ResourceComputeInstanceGroupManager() *schema.Resource { }, }, }, - <% end -%> "stateful": { Type: schema.TypeList, Computed: true, @@ -589,9 +587,7 @@ func resourceComputeInstanceGroupManagerCreate(d *schema.ResourceData, meta inte Versions: expandVersions(d.Get("version").([]interface{})), UpdatePolicy: expandUpdatePolicy(d.Get("update_policy").([]interface{})), InstanceLifecyclePolicy: expandInstanceLifecyclePolicy(d.Get("instance_lifecycle_policy").([]interface{})), - <% unless version == "ga" -%> AllInstancesConfig: expandAllInstancesConfig(nil, d.Get("all_instances_config").([]interface{})), - <% end -%> StatefulPolicy: expandStatefulPolicy(d), // Force send TargetSize to allow a value of 0. 
@@ -815,13 +811,11 @@ func resourceComputeInstanceGroupManagerRead(d *schema.ResourceData, meta interf if err = d.Set("instance_lifecycle_policy", flattenInstanceLifecyclePolicy(manager.InstanceLifecyclePolicy)); err != nil { return fmt.Errorf("Error setting instance lifecycle policy in state: %s", err.Error()) } - <% unless version == "ga" -%> if manager.AllInstancesConfig != nil { if err = d.Set("all_instances_config", flattenAllInstancesConfig(manager.AllInstancesConfig)); err != nil { return fmt.Errorf("Error setting all_instances_config in state: %s", err.Error()) } } - <% end -%> if err = d.Set("status", flattenStatus(manager.Status)); err != nil { return fmt.Errorf("Error setting status in state: %s", err.Error()) } @@ -892,7 +886,6 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte change = true } - <% unless version == "ga" -%> if d.HasChange("all_instances_config") { oldAic, newAic := d.GetChange("all_instances_config") if newAic == nil || len(newAic.([]interface{})) == 0 { @@ -902,7 +895,6 @@ func resourceComputeInstanceGroupManagerUpdate(d *schema.ResourceData, meta inte } change = true } - <% end -%> if d.HasChange("stateful_internal_ip") || d.HasChange("stateful_external_ip") || d.HasChange("stateful_disk") { updatedManager.StatefulPolicy = expandStatefulPolicy(d) @@ -1045,11 +1037,7 @@ func resourceComputeInstanceGroupManagerDelete(d *schema.ResourceData, meta inte func computeIGMWaitForInstanceStatus(d *schema.ResourceData, meta interface{}) error { waitForUpdates := d.Get("wait_for_instances_status").(string) == "UPDATED" conf := resource.StateChangeConf{ - <% if version == "ga" -%> - Pending: []string{"creating", "error", "updating per instance configs", "reaching version target"}, - <% else -%> Pending: []string{"creating", "error", "updating per instance configs", "reaching version target", "updating all instances config"}, - <% end -%> Target: []string{"created"}, Refresh: waitForInstancesRefreshFunc(getManager, 
waitForUpdates, d, meta), Timeout: d.Timeout(schema.TimeoutCreate), @@ -1207,6 +1195,7 @@ func expandInstanceLifecyclePolicy(configured []interface{}) *compute.InstanceGr for _, raw := range configured { data := raw.(map[string]interface{}) instanceLifecyclePolicy.ForceUpdateOnRepair = data["force_update_on_repair"].(string) + instanceLifecyclePolicy.DefaultActionOnFailure = data["default_action_on_failure"].(string) } return instanceLifecyclePolicy } @@ -1395,12 +1384,12 @@ func flattenInstanceLifecyclePolicy(instanceLifecyclePolicy *compute.InstanceGro if instanceLifecyclePolicy != nil { ilp := map[string]interface{}{} ilp["force_update_on_repair"] = instanceLifecyclePolicy.ForceUpdateOnRepair + ilp["default_action_on_failure"] = instanceLifecyclePolicy.DefaultActionOnFailure results = append(results, ilp) } return results } -<% unless version == "ga" -%> func expandAllInstancesConfig(old []interface{}, new []interface{}) *compute.InstanceGroupManagerAllInstancesConfig { var properties *compute.InstancePropertiesPatch for _, raw := range new { @@ -1456,7 +1445,6 @@ func flattenAllInstancesConfig(allInstancesConfig *compute.InstanceGroupManagerA results = append(results, props) return results } -<% end -%> func flattenStatus(status *compute.InstanceGroupManagerStatus) []map[string]interface{} { results := []map[string]interface{}{} @@ -1465,11 +1453,9 @@ func flattenStatus(status *compute.InstanceGroupManagerStatus) []map[string]inte "stateful": flattenStatusStateful(status.Stateful), "version_target": flattenStatusVersionTarget(status.VersionTarget), } - <% unless version == "ga" -%> if status.AllInstancesConfig != nil { data["all_instances_config"] = flattenStatusAllInstancesConfig(status.AllInstancesConfig) } - <% end -%> results = append(results, data) return results } @@ -1502,7 +1488,6 @@ func flattenStatusVersionTarget(versionTarget *compute.InstanceGroupManagerStatu return results } -<% unless version == "ga" -%> func 
flattenStatusAllInstancesConfig(allInstancesConfig *compute.InstanceGroupManagerStatusAllInstancesConfig) []map[string]interface{} { results := []map[string]interface{}{} data := map[string]interface{}{ @@ -1511,7 +1496,6 @@ func flattenStatusAllInstancesConfig(allInstancesConfig *compute.InstanceGroupMa results = append(results, data) return results } -<% end -%> func resourceInstanceGroupManagerStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { if err := d.Set("wait_for_instances", false); err != nil { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.erb index 7c3322419219..a600804fbf66 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_group_manager_test.go.erb @@ -116,6 +116,9 @@ func TestAccInstanceGroupManager_update(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccInstanceGroupManager_update(template1, target1, description, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "DO_NOTHING"), + ), }, { ResourceName: "google_compute_instance_group_manager.igm-update", @@ -125,6 +128,9 @@ func TestAccInstanceGroupManager_update(t *testing.T) { }, { Config: testAccInstanceGroupManager_update2(template1, target1, target2, template2, description, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "REPAIR"), + ), }, { ResourceName: "google_compute_instance_group_manager.igm-update", @@ -134,6 +140,9 @@ func TestAccInstanceGroupManager_update(t *testing.T) { }, { 
Config: testAccInstanceGroupManager_update3(template1, target1, target2, template2, description2, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "REPAIR"), + ), }, { ResourceName: "google_compute_instance_group_manager.igm-update", @@ -660,7 +669,6 @@ resource "google_compute_instance_group_manager" "igm-update" { name = "customhttp" port = 8080 } -<% unless version == "ga" -%> all_instances_config { metadata = { foo = "bar" @@ -669,10 +677,10 @@ resource "google_compute_instance_group_manager" "igm-update" { doo = "dad" } } -<% end -%> instance_lifecycle_policy { force_update_on_repair = "YES" + default_action_on_failure = "DO_NOTHING" } } `, template, target, description, igm) @@ -766,7 +774,6 @@ resource "google_compute_instance_group_manager" "igm-update" { port = 8443 } -<% unless version == "ga" -%> all_instances_config { metadata = { doo = "dad" @@ -775,10 +782,10 @@ resource "google_compute_instance_group_manager" "igm-update" { foo = "bar" } } -<% end -%> instance_lifecycle_policy { force_update_on_repair = "NO" + default_action_on_failure = "REPAIR" } } `, template1, target1, target2, template2, description, igm) @@ -1780,7 +1787,6 @@ resource "google_compute_instance_group_manager" "igm-basic" { max_surge_fixed = 0 max_unavailable_percent = 50 } -<% unless version == "ga" -%> all_instances_config { metadata = { doo = "dad" @@ -1789,9 +1795,9 @@ resource "google_compute_instance_group_manager" "igm-basic" { foo = "bar" } } -<% end -%> instance_lifecycle_policy { force_update_on_repair = "YES" + default_action_on_failure = "REPAIR" } wait_for_instances = true wait_for_instances_status = "UPDATED" diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb index 
8530b1e3e6b5..8c756656264c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager.go.erb @@ -208,12 +208,7 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { Optional: true, Default: "STABLE", ValidateFunc: validation.StringInSlice([]string{"STABLE", "UPDATED"}, false), - - <% if version == "ga" -%> - Description: `When used with wait_for_instances specifies the status to wait for. When STABLE is specified this resource will wait until the instances are stable before returning. When UPDATED is set, it will wait for the version target to be reached and any per instance configs to be effective as well as all instances to be stable before returning.`, - <% else -%> Description: `When used with wait_for_instances specifies the status to wait for. When STABLE is specified this resource will wait until the instances are stable before returning. 
When UPDATED is set, it will wait for the version target to be reached and any per instance configs to be effective and all instances configs to be effective as well as all instances to be stable before returning.`, - <% end -%> }, "auto_healing_policies": { @@ -268,6 +263,13 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { Description: `The instance lifecycle policy for this managed instance group.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ + "default_action_on_failure": { + Type: schema.TypeString, + Default: "REPAIR", + Optional: true, + ValidateFunc: validation.StringInSlice([]string{"REPAIR", "DO_NOTHING"}, true), + Description: `Default behavior for all instance or health check failures.`, + }, "force_update_on_repair": { Type: schema.TypeString, Default: "NO", @@ -365,7 +367,6 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { }, }, }, - <% unless version == "ga" -%> "all_instances_config": { Type: schema.TypeList, Optional: true, @@ -390,7 +391,6 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { }, }, }, - <% end -%> "stateful_internal_ip": { Type: schema.TypeList, Optional: true, @@ -481,7 +481,6 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { }, }, }, - <% unless version == "ga" -%> "all_instances_config": { Type: schema.TypeList, Computed: true, @@ -496,7 +495,6 @@ func ResourceComputeRegionInstanceGroupManager() *schema.Resource { }, }, }, - <% end -%> "stateful": { Type: schema.TypeList, Computed: true, @@ -562,9 +560,7 @@ func resourceComputeRegionInstanceGroupManagerCreate(d *schema.ResourceData, met Versions: expandVersions(d.Get("version").([]interface{})), UpdatePolicy: expandRegionUpdatePolicy(d.Get("update_policy").([]interface{})), InstanceLifecyclePolicy: expandInstanceLifecyclePolicy(d.Get("instance_lifecycle_policy").([]interface{})), - <% unless version == "ga" -%> AllInstancesConfig: expandAllInstancesConfig(nil, 
d.Get("all_instances_config").([]interface{})), - <% end -%> DistributionPolicy: expandDistributionPolicy(d), StatefulPolicy: expandStatefulPolicy(d), // Force send TargetSize to allow size of 0. @@ -602,11 +598,7 @@ func resourceComputeRegionInstanceGroupManagerCreate(d *schema.ResourceData, met func computeRIGMWaitForInstanceStatus(d *schema.ResourceData, meta interface{}) error { waitForUpdates := d.Get("wait_for_instances_status").(string) == "UPDATED" conf := resource.StateChangeConf{ - <% if version == "ga" -%> - Pending: []string{"creating", "error", "updating per instance configs", "reaching version target"}, - <% else -%> Pending: []string{"creating", "error", "updating per instance configs", "reaching version target", "updating all instances config"}, - <% end -%> Target: []string{"created"}, Refresh: waitForInstancesRefreshFunc(getRegionalManager, waitForUpdates, d, meta), Timeout: d.Timeout(schema.TimeoutCreate), @@ -674,11 +666,9 @@ func waitForInstancesRefreshFunc(f getInstanceManagerFunc, waitForUpdates bool, if !m.Status.VersionTarget.IsReached { return false, "reaching version target", nil } - <% unless version == "ga" -%> if !m.Status.AllInstancesConfig.Effective { return false, "updating all instances config", nil } - <% end -%> } return true, "created", nil } else { @@ -763,13 +753,11 @@ func resourceComputeRegionInstanceGroupManagerRead(d *schema.ResourceData, meta if err = d.Set("instance_lifecycle_policy", flattenInstanceLifecyclePolicy(manager.InstanceLifecyclePolicy)); err != nil { return fmt.Errorf("Error setting instance lifecycle policy in state: %s", err.Error()) } - <% unless version == "ga" -%> if manager.AllInstancesConfig != nil { if err = d.Set("all_instances_config", flattenAllInstancesConfig(manager.AllInstancesConfig)); err != nil { return fmt.Errorf("Error setting all_instances_config in state: %s", err.Error()) } } - <% end -%> if err = d.Set("stateful_disk", flattenStatefulPolicy(manager.StatefulPolicy)); err != nil { return 
fmt.Errorf("Error setting stateful_disk in state: %s", err.Error()) } @@ -852,7 +840,6 @@ func resourceComputeRegionInstanceGroupManagerUpdate(d *schema.ResourceData, met change = true } - <% unless version == "ga" -%> if d.HasChange("all_instances_config") { oldAic, newAic := d.GetChange("all_instances_config") if newAic == nil || len(newAic.([]interface{})) == 0 { @@ -862,7 +849,6 @@ func resourceComputeRegionInstanceGroupManagerUpdate(d *schema.ResourceData, met } change = true } - <% end -%> if d.HasChange("list_managed_instances_results") { updatedManager.ListManagedInstancesResults = d.Get("list_managed_instances_results").(string) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager_test.go.erb index a5470d09b032..c071b8b13f05 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_instance_group_manager_test.go.erb @@ -83,6 +83,9 @@ func TestAccRegionInstanceGroupManager_update(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccRegionInstanceGroupManager_update(template1, target1, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_region_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "DO_NOTHING"), + ), }, { ResourceName: "google_compute_region_instance_group_manager.igm-update", @@ -92,6 +95,9 @@ func TestAccRegionInstanceGroupManager_update(t *testing.T) { }, { Config: testAccRegionInstanceGroupManager_update2(template1, target1, target2, template2, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_region_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "REPAIR"), + ), }, { 
ResourceName: "google_compute_region_instance_group_manager.igm-update", @@ -101,6 +107,9 @@ func TestAccRegionInstanceGroupManager_update(t *testing.T) { }, { Config: testAccRegionInstanceGroupManager_update3(template1, target1, target2, template2, igm), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("google_compute_region_instance_group_manager.igm-update", "instance_lifecycle_policy.0.default_action_on_failure", "REPAIR"), + ), }, { ResourceName: "google_compute_region_instance_group_manager.igm-update", @@ -562,7 +571,6 @@ resource "google_compute_region_instance_group_manager" "igm-update" { port = 8080 } -<% unless version == "ga" -%> all_instances_config { metadata = { foo = "bar" @@ -571,10 +579,10 @@ resource "google_compute_region_instance_group_manager" "igm-update" { doo = "dad" } } -<% end -%> instance_lifecycle_policy { force_update_on_repair = "YES" + default_action_on_failure = "DO_NOTHING" } } `, template, target, igm) @@ -668,7 +676,6 @@ resource "google_compute_region_instance_group_manager" "igm-update" { port = 8443 } -<% unless version == "ga" -%> all_instances_config { metadata = { doo = "dad" @@ -677,10 +684,10 @@ resource "google_compute_region_instance_group_manager" "igm-update" { foo = "bar" } } -<% end -%> instance_lifecycle_policy { force_update_on_repair = "NO" + default_action_on_failure = "REPAIR" } } `, template1, target1, target2, template2, igm) diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.erb index 6b7cfd194215..535218117c07 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_region_target_tcp_proxy_test.go.erb @@ -13,9 +13,9 @@ import ( func TestAccComputeRegionTargetTcpProxy_update(t *testing.T) { t.Parallel() - target := 
fmt.Sprintf("trtcp-test-%s", acctest.RandString(t, 10)) - backend := fmt.Sprintf("trtcp-test-%s", acctest.RandString(t, 10)) - hc := fmt.Sprintf("trtcp-test-%s", acctest.RandString(t, 10)) + target := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + backend := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + hc := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, diff --git a/mmv1/third_party/terraform/services/container/node_config.go.erb b/mmv1/third_party/terraform/services/container/node_config.go.erb index 8d53ce85b51d..d264b4dcc8de 100644 --- a/mmv1/third_party/terraform/services/container/node_config.go.erb +++ b/mmv1/third_party/terraform/services/container/node_config.go.erb @@ -680,14 +680,12 @@ func schemaNodeConfig() *schema.Schema { Optional: true, Description: `A map of resource manager tags. Resource manager tag keys and values have the same definition as resource manager tags. Keys must be in the format tagKeys/{tag_key_id}, and values are in the format tagValues/456. 
The field is ignored (both PUT & PATCH) when empty.`, }, - <% unless version == 'ga' -%> "enable_confidential_storage": { Type: schema.TypeBool, Optional: true, ForceNew: true, Description: `If enabled boot disks are configured with confidential mode.`, }, - <% end -%> }, }, } @@ -978,11 +976,9 @@ func expandNodeConfig(v interface{}) *container.NodeConfig { nc.SoleTenantConfig = expandSoleTenantConfig(v) } - <% unless version == 'ga' -%> if v,ok := nodeConfig["enable_confidential_storage"]; ok { nc.EnableConfidentialStorage = v.(bool) } - <% end -%> <% unless version == "ga" -%> if v, ok := nodeConfig["host_maintenance_policy"]; ok { @@ -1236,9 +1232,7 @@ func flattenNodeConfig(c *container.NodeConfig, v interface{}) []map[string]inte "sole_tenant_config": flattenSoleTenantConfig(c.SoleTenantConfig), "fast_socket": flattenFastSocket(c.FastSocket), "resource_manager_tags": flattenResourceManagerTags(c.ResourceManagerTags), - <% unless version == 'ga' -%> "enable_confidential_storage": c.EnableConfidentialStorage, - <% end -%> }) if len(c.OauthScopes) > 0 { diff --git a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb index a542384379f6..910fba9602b0 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb +++ b/mmv1/third_party/terraform/services/container/resource_container_cluster_test.go.erb @@ -4230,7 +4230,7 @@ func TestAccContainerCluster_withGatewayApiConfig(t *testing.T) { Steps: []resource.TestStep{ { Config: testAccContainerCluster_withGatewayApiConfig(clusterName, "CANARY", networkName, subnetworkName), - ExpectError: regexp.MustCompile(`expected gateway_api_config\.0\.channel to be one of \[CHANNEL_DISABLED CHANNEL_EXPERIMENTAL CHANNEL_STANDARD\], got CANARY`), + ExpectError: regexp.MustCompile(`expected gateway_api_config\.0\.channel to be one of [^,]+, got CANARY`), }, { Config: 
testAccContainerCluster_withGatewayApiConfig(clusterName, "CHANNEL_DISABLED", networkName, subnetworkName), @@ -9389,7 +9389,6 @@ func testAccContainerCluster_additional_pod_ranges_config(name string, nameCount `, name, name, name, aprc) } -<% unless version == 'ga' -%> func TestAccContainerCluster_withConfidentialBootDisk(t *testing.T) { t.Parallel() @@ -9558,7 +9557,6 @@ resource "google_container_cluster" "without_confidential_boot_disk" { } `, clusterName, npName, networkName, subnetworkName) } -<% end -%> <% unless version == 'ga' -%> func testAccContainerCluster_withWorkloadALTSConfig(projectID, name, networkName, subnetworkName string, enable bool) string { diff --git a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.erb b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.erb index 42939e9b7e9b..ec3e852d8ce6 100644 --- a/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.erb +++ b/mmv1/third_party/terraform/services/container/resource_container_node_pool_test.go.erb @@ -4135,8 +4135,6 @@ resource "google_container_node_pool" "np" { } <% end -%> -<% unless version == 'ga' -%> - func TestAccContainerNodePool_withConfidentialBootDisk(t *testing.T) { t.Parallel() @@ -4262,7 +4260,6 @@ resource "google_container_node_pool" "without_confidential_boot_disk" { } `, cluster, networkName, subnetworkName, np) } -<% end -%> func testAccContainerNodePool_resourceManagerTags(projectID, clusterName, networkName, subnetworkName, randomSuffix string) string { return fmt.Sprintf(` diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_device_check_config_test.go.erb b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_device_check_config_test.go.erb new file mode 100644 index 000000000000..156a2cac1e59 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/firebaseappcheck/resource_firebase_app_check_device_check_config_test.go.erb @@ -0,0 +1,63 @@ +<% autogen_exception -%> +package firebaseappcheck_test +<% unless version == 'ga' -%> + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "team_id": "9987654321", + "private_key_path": "test-fixtures/private-key.p8", + "token_ttl": "3900s", + "random_suffix": acctest.RandString(t, 10), + } + + contextUpdated := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "team_id": "9987654321", + "private_key_path": "test-fixtures/private-key-2.p8", + "token_ttl": "7200s", + // Bundle ID needs to be the same between updates but different between tests + "random_suffix": context["random_suffix"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigFullExample(context), + }, + { + ResourceName: "google_firebase_app_check_device_check_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"private_key", "app_id"}, + }, + { + Config: testAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigFullExample(contextUpdated), + }, + { + ResourceName: "google_firebase_app_check_device_check_config.default", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"private_key", "app_id"}, + }, + }, + }) +} +<% end -%> diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key-2.p8 b/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key-2.p8 new file mode 100644 index 000000000000..f581bea0e054 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key-2.p8 @@ -0,0 +1,15 @@ +-----BEGIN PRIVATE KEY----- +MIICWwIBAAKBgQCVA/2LQtUYJI8KlNHWzNPzGzVv01qavSbmuW0QYjshxRnXDBk+ +fWZePJAmsyuhU4Y2SkM5Wqvgjo/rDPaRPdTiEtKQuNesRgQeOVmAWDkIXEiieTwb +RYuXbdpZhH86Vt6xOMt14tGPKE5VuuySvTqgQRCvRTylrF3koBc0d/8NVQIDAQAB +AoGAG7qBXH+ULYjoAR0OKv00V2FxwRxAGNknuvk4HTtaK3+Evmpm7CTjfpegb0MZ +1Ew5hjKtbae8oe2FRETGQOKTkS68I/D9PGP4aTzmSkf6PjwXwhlBYp09xxv4nmxV +BCbsoicNMvdk0F7SPblnZBO9i0DpZ8pT9wyPo8QzWBfi5IECQQD8gIOja3Zim4R9 +HVL7Blvhzhl2ibuITV2PKfQ11v0a+Om+rZKwdrhxKgWoguDvvP7ExWSPTZJKSm0J +bzhU+APhAkEAlxR3fY+zSpxHaxbOqZ6cea5cZtyHcX607nW8N037yBErIjcJKL65 +gHx9Vq1Xo24o4C6kyzmh00BnkyXul4439QJAPWvtmaUcaSQ3eE/XzaRgWW9PFlyu +t5tKNPcZprcjXppKEc4bLr3SZAS616DuoqKwvqDds1ZFTbkJCRB6/YBPQQJAeyGG +JYKJyKRIHMJw2pNXymBOFNNlXB29jp/ML3LSYwODGRar01ZmT46mhI8wXxV4IQZC +7xLgjhDumWIP69tQRQJAfuOy4TP2drxNI7fP7YenV1ks6CiLHcBN04f6NItWilTN +Cc+Mv/rio9xO56Yp9oePMaFT9QEzfO/cqX6QvyfblQ== +-----END PRIVATE KEY----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key.p8 b/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key.p8 new file mode 100644 index 000000000000..d48a562a97bd --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/test-fixtures/private-key.p8 @@ -0,0 +1,15 @@ +-----BEGIN PRIVATE KEY----- +MIICXAIBAAKBgG3vDegwb8uUvns/Iuodo/cNK0eMHxqb+2n16dQnxL7az+ShNWKQ +jTSzXY5y4VexrTdPEU5ZiTPONZXyl4/iFvOnyFxnC6Zjyr+xeIU5X4TmjYq0yCuZ +xbovAWw+E4KUKt1V62avd+hGZHPtCKLfV/uYITG7I8R+GyEAdMoaXP8JAgMBAAEC 
+gYBsQFf7aabMWx3uks84phNNNOSHQY1nUG2Te5nTVY3BOgo44h2Ffz3sxSq9GJaZ +GdatfehWtIgMQWQ20Xk5L7LUzSxmndHbUIzYU17xZrAsgmjYTwvAQ13If2L6S+pz +EUbTLkMnlbAgvtJ2AqZZZ3LE41N9ey60gVB1cCu9fCXLuQJBANAeoDXXvh7nXdyN +Zd84zXpSk8SLmAmQn1JB7z3oPkrZ0dG42GMOzYw9MP8n2bATHV+OB0/gdUFJAYYp +kwz+bJ8CQQCHObHelAlkH3or+JVECpovNMHs2UGB6yF1ZX0Nep3iR90fhi3BsnVo +IQGdHlQC2NL+iaBF4Mv2/dfZTen1vMtXAkEAk7+KQW8+G7ZpXjBLyCMNTO/e08O+ +VdwEH2OLsslzn7PvTxIJHJnfttWiOSJTWrrXOYUdD8nrtENd/574NFtTRQJAaExD +uJ0NsT/mB0wwNM7IpWhXusrHD+G/aMDidyb/56vuDYZ8fE2c6LesevcNbTS3aMPV +7o+4QcUAWwcRUQxQ+QJBAJEAwwzFnLJtrFYEnz7YNufgjiMrX7CBJCwrXGZpZrHX +EdDDOGiLrm871hc3tNQWmzou9AFIwZFeIOXVdIHIQzk= +-----END PRIVATE KEY----- \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firestore/resource_firestore_field_test.go b/mmv1/third_party/terraform/services/firestore/resource_firestore_field_test.go index 794b5f8369a5..8be524c6e99a 100644 --- a/mmv1/third_party/terraform/services/firestore/resource_firestore_field_test.go +++ b/mmv1/third_party/terraform/services/firestore/resource_firestore_field_test.go @@ -103,7 +103,11 @@ resource "google_firestore_database" "database" { location_id = "nam5" type = "FIRESTORE_NATIVE" - depends_on = [google_project_service.firestore] + # used to control delete order + depends_on = [ + google_project_service.firestore, + google_project.project + ] } `, context) } else { @@ -115,7 +119,7 @@ resource "google_firestore_database" "database" { type = "FIRESTORE_NATIVE" delete_protection_state = "DELETE_PROTECTION_DISABLED" - deletion_policy = "DELETE" + deletion_policy = "DELETE" } `, context) } diff --git a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.erb b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.erb index d231ea19e62c..a7ab46c640a6 100644 --- a/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.erb +++ 
b/mmv1/third_party/terraform/services/gkehub/resource_gke_hub_feature_membership_test.go.erb @@ -1008,6 +1008,17 @@ func TestAccGKEHubFeatureMembership_gkehubFeaturePolicyController(t *testing.T) ImportState: true, ImportStateVerify: true, }, + { + Config: testAccGKEHubFeatureMembership_policycontrollerUpdateMaps(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "policycontroller", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, }, }) } @@ -1064,9 +1075,92 @@ resource "google_gke_hub_feature_membership" "feature_member" { "PROMETHEUS" ] } + deployment_configs { + component_name = "admission" + replica_count = 3 + pod_affinity = "ANTI_AFFINITY" + container_resources { + limits { + memory = "1Gi" + cpu = "1.5" + } + requests { + memory = "500Mi" + cpu = "150m" + } + } + pod_tolerations { + key = "key1" + operator = "Equal" + value = "value1" + effect = "NoSchedule" + } + } + deployment_configs { + component_name = "mutation" + replica_count = 3 + pod_affinity = "ANTI_AFFINITY" + } policy_content { template_library { - installation = "NOT_INSTALLED" + installation = "ALL" + } + bundles { + bundle_name = "pci-dss-v3.2.1" + exempted_namespaces = ["sample-namespace"] + } + bundles { + bundle_name = "nist-sp-800-190" + } + } + } + version = "1.17.0" + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_policycontrollerUpdateMaps(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "policycontroller" + location = "global" + depends_on = [google_project_service.container, google_project_service.gkehub, 
google_project_service.poco] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_SUSPENDED" + constraint_violation_limit = 50 + referential_rules_enabled = true + log_denies_enabled = true + mutation_enabled = true + monitoring { + backends = [ + "PROMETHEUS" + ] + } + deployment_configs { + component_name = "admission" + pod_affinity = "NO_AFFINITY" + } + deployment_configs { + component_name = "audit" + container_resources { + limits { + memory = "1Gi" + cpu = "1.5" + } + requests { + memory = "500Mi" + cpu = "150m" + } } } } diff --git a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_test.go b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_test.go index 967fe08c4bc2..be00b628a7c6 100644 --- a/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_test.go +++ b/mmv1/third_party/terraform/services/gkehub2/resource_gke_hub_scope_test.go @@ -47,6 +47,11 @@ func testAccGKEHub2Scope_gkehubScopeBasicExample_basic(context map[string]interf return acctest.Nprintf(` resource "google_gke_hub_scope" "scope" { scope_id = "tf-test-scope%{random_suffix}" + namespace_labels = { + keyb = "valueb" + keya = "valuea" + keyc = "valuec" + } labels = { keyb = "valueb" keya = "valuea" @@ -60,6 +65,11 @@ func testAccGKEHub2Scope_gkehubScopeBasicExample_update(context map[string]inter return acctest.Nprintf(` resource "google_gke_hub_scope" "scope" { scope_id = "tf-test-scope%{random_suffix}" + namespace_labels = { + updated_keyb = "updated_valueb" + updated_keya = "updated_valuea" + updated_keyc = "updated_valuec" + } labels = { updated_keyb = "updated_valueb" updated_keya = "updated_valuea" diff --git 
a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go index 328fc50734d7..29c650fa0ecb 100644 --- a/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go +++ b/mmv1/third_party/terraform/services/kms/resource_kms_crypto_key_test.go @@ -444,6 +444,35 @@ func TestAccKmsCryptoKeyVersion_basic(t *testing.T) { }) } +func TestAccKmsCryptoKeyVersionWithSymmetricHSM(t *testing.T) { + t.Parallel() + + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + projectOrg := envvar.GetTestOrgFromEnv(t) + projectBillingAccount := envvar.GetTestBillingAccountFromEnv(t) + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testGoogleKmsCryptoKeyVersionWithSymmetricHSM(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), + }, + { + ResourceName: "google_kms_crypto_key_version.crypto_key_version", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testGoogleKmsCryptoKeyVersion_removed(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), + }, + }, + }) +} + func TestAccKmsCryptoKeyVersion_skipInitialVersion(t *testing.T) { t.Parallel() @@ -747,6 +776,44 @@ resource "google_kms_crypto_key_version" "crypto_key_version" { `, projectId, projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName) } +func testGoogleKmsCryptoKeyVersionWithSymmetricHSM(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + name = "%s" + project_id = "%s" + org_id = "%s" + 
billing_account = "%s" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.acceptance.project + name = "%s" + location = "us-central1" +} + +resource "google_kms_crypto_key" "crypto_key" { + name = "%s" + key_ring = google_kms_key_ring.key_ring.id + labels = { + key = "value" + } + version_template { + algorithm = "GOOGLE_SYMMETRIC_ENCRYPTION" + protection_level = "HSM" + } +} + +resource "google_kms_crypto_key_version" "crypto_key_version" { + crypto_key = google_kms_crypto_key.crypto_key.id +} +`, projectId, projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName) +} + func testGoogleKmsCryptoKeyVersion_removed(projectId, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName string) string { return fmt.Sprintf(` resource "google_project" "acceptance" { diff --git a/mmv1/third_party/terraform/services/kms/resource_kms_ekm_connection_test.go b/mmv1/third_party/terraform/services/kms/resource_kms_ekm_connection_test.go new file mode 100644 index 000000000000..38d9b6c39fd5 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/resource_kms_ekm_connection_test.go @@ -0,0 +1,139 @@ +package kms_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccKMSEkmConnection_kmsEkmConnectionBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKMSEkmConnection_kmsEkmConnectionBasicExample_full(context), + }, + { + ResourceName: 
"google_kms_ekm_connection.example-ekmconnection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name"}, + }, + { + Config: testAccKMSEkmConnection_kmsEkmConnectionBasicExample_update(context), + }, + { + ResourceName: "google_kms_ekm_connection.example-ekmconnection", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "name"}, + }, + }, + }) +} + +func testAccKMSEkmConnection_kmsEkmConnectionBasicExample_full(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_secret_manager_secret_version" "raw_der" { + secret = "playground-cert" + project = "315636579862" +} +data "google_secret_manager_secret_version" "hostname" { + secret = "external-uri" + project = "315636579862" +} +data "google_secret_manager_secret_version" "servicedirectoryservice" { + secret = "external-servicedirectoryservice" + project = "315636579862" +} +data "google_project" "vpc-project" { + project_id = "cloud-ekm-refekm-playground" +} +data "google_project" "project" { +} +resource "google_project_iam_member" "add_sdviewer" { + project = data.google_project.vpc-project.number + role = "roles/servicedirectory.viewer" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-ekms.iam.gserviceaccount.com" +} +resource "google_project_iam_member" "add_pscAuthorizedService" { + project = data.google_project.vpc-project.number + role = "roles/servicedirectory.pscAuthorizedService" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-ekms.iam.gserviceaccount.com" +} +resource "google_kms_ekm_connection" "example-ekmconnection" { + name = "tf_test_ekmconnection_example%{random_suffix}" + location = "us-central1" + key_management_mode = "MANUAL" + service_resolvers { + service_directory_service = data.google_secret_manager_secret_version.servicedirectoryservice.secret_data + hostname = 
data.google_secret_manager_secret_version.hostname.secret_data + server_certificates { + raw_der = data.google_secret_manager_secret_version.raw_der.secret_data + } + } + depends_on = [ + google_project_iam_member.add_pscAuthorizedService, + google_project_iam_member.add_sdviewer + ] +} +`, context) +} + +func testAccKMSEkmConnection_kmsEkmConnectionBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "vpc-project" { + project_id = "cloud-ekm-refekm-playground" +} +data "google_project" "project" { +} +data "google_secret_manager_secret_version" "raw_der" { + secret = "playground-cert" + project = "315636579862" +} +data "google_secret_manager_secret_version" "hostname" { + secret = "external-uri" + project = "315636579862" +} +data "google_secret_manager_secret_version" "servicedirectoryservice" { + secret = "external-servicedirectoryservice" + project = "315636579862" +} +resource "google_project_iam_member" "add_sdviewer_updateekmconnection" { + project = data.google_project.vpc-project.number + role = "roles/servicedirectory.viewer" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-ekms.iam.gserviceaccount.com" +} +resource "google_project_iam_member" "add_pscAuthorizedService_updateekmconnection" { + project = data.google_project.vpc-project.number + role = "roles/servicedirectory.pscAuthorizedService" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-ekms.iam.gserviceaccount.com" +} +resource "google_kms_ekm_connection" "example-ekmconnection" { + name = "tf_test_ekmconnection_example%{random_suffix}" + location = "us-central1" + key_management_mode = "CLOUD_KMS" + crypto_space_path = "v0/longlived/crypto-space-placeholder" + service_resolvers { + service_directory_service = data.google_secret_manager_secret_version.servicedirectoryservice.secret_data + hostname = data.google_secret_manager_secret_version.hostname.secret_data + 
server_certificates { + raw_der = data.google_secret_manager_secret_version.raw_der.secret_data + } + } + depends_on = [ + google_project_iam_member.add_pscAuthorizedService_updateekmconnection, + google_project_iam_member.add_sdviewer_updateekmconnection + ] +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_hub_sweeper.go.erb b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_hub_sweeper.go.erb new file mode 100644 index 000000000000..573e4e0838fa --- /dev/null +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_hub_sweeper.go.erb @@ -0,0 +1,127 @@ +<% autogen_exception -%> +package networkconnectivity + +<% unless version == "ga" -%> + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("NetworkConnectivityHub", testSweepNetworkConnectivityHub) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepNetworkConnectivityHub(region string) error { + resourceName := "NetworkConnectivityHub" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: 
map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://networkconnectivity.googleapis.com/v1/projects/{{project}}/locations/global/hubs", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["hubs"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://networkconnectivity.googleapis.com/v1/projects/{{project}}/locations/global/hubs/{{name}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} + +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_sweeper.go.erb b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_sweeper.go.erb new file mode 100644 index 000000000000..f1f1e49cb9a3 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkconnectivity/resource_network_connectivity_spoke_sweeper.go.erb @@ -0,0 +1,127 @@ +<% autogen_exception -%> +package networkconnectivity + +<% unless version == "ga" -%> + +import ( + "context" + "log" + "strings" + "testing" + + 
"github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("NetworkConnectivitySpoke", testSweepNetworkConnectivitySpoke) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepNetworkConnectivitySpoke(region string) error { + resourceName := "NetworkConnectivitySpoke" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://networkconnectivity.googleapis.com/v1/projects/{{project}}/locations/global/spokes", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["spokes"] + if !ok { + 
log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://networkconnectivity.googleapis.com/v1/projects/{{project}}/locations/global/spokes/{{name}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} + +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.erb b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.erb index efa7a1d6055a..74669dc44525 100644 --- a/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.erb +++ 
b/mmv1/third_party/terraform/services/notebooks/resource_notebooks_runtime_test.go.erb @@ -25,6 +25,7 @@ func TestAccNotebooksRuntime_update(t *testing.T) { ResourceName: "google_notebooks_runtime.runtime", ImportState: true, ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccNotebooksRuntime_update(context), @@ -33,6 +34,7 @@ func TestAccNotebooksRuntime_update(t *testing.T) { ResourceName: "google_notebooks_runtime.runtime", ImportState: true, ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, { Config: testAccNotebooksRuntime_basic(context), @@ -41,6 +43,7 @@ func TestAccNotebooksRuntime_update(t *testing.T) { ResourceName: "google_notebooks_runtime.runtime", ImportState: true, ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, }, }, }) @@ -101,6 +104,9 @@ resource "google_notebooks_runtime" "runtime" { reserved_ip_range = "192.168.255.0/24" } } + labels = { + k = "val" + } } `, context) } diff --git a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go index acf1db96fa56..1d2c97855fe0 100644 --- a/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go +++ b/mmv1/third_party/terraform/services/resourcemanager/resource_google_project.go @@ -227,7 +227,13 @@ func resourceGoogleProjectCreate(d *schema.ResourceData, meta interface{}) error return errwrap.Wrapf("Error enabling the Compute Engine API required to delete the default network: {{err}} ", err) } - if err = forceDeleteComputeNetwork(d, config, project.ProjectId, "default"); err != nil { + err = forceDeleteComputeNetwork(d, config, project.ProjectId, "default") + // Retry if API is not yet enabled. 
+ if err != nil && transport_tpg.IsGoogleApiErrorWithCode(err, 403) { + time.Sleep(10 * time.Second) + err = forceDeleteComputeNetwork(d, config, project.ProjectId, "default") + } + if err != nil { if transport_tpg.IsGoogleApiErrorWithCode(err, 404) { log.Printf("[DEBUG] Default network not found for project %q, no need to delete it", project.ProjectId) } else { diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.erb b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.erb index 8f9d429e3408..ed7422389d82 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.erb +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket.go.erb @@ -1237,10 +1237,10 @@ func flattenBucketLifecycle(d *schema.ResourceData, lifecycle *storage.BucketLif rules := make([]map[string]interface{}, 0, len(lifecycle.Rule)) - for _, rule := range lifecycle.Rule { + for index, rule := range lifecycle.Rule { rules = append(rules, map[string]interface{}{ "action": schema.NewSet(resourceGCSBucketLifecycleRuleActionHash, []interface{}{flattenBucketLifecycleRuleAction(rule.Action)}), - "condition": schema.NewSet(resourceGCSBucketLifecycleRuleConditionHash, []interface{}{flattenBucketLifecycleRuleCondition(d, rule.Condition)}), + "condition": schema.NewSet(resourceGCSBucketLifecycleRuleConditionHash, []interface{}{flattenBucketLifecycleRuleCondition(index, d, rule.Condition)}), }) } @@ -1254,7 +1254,7 @@ func flattenBucketLifecycleRuleAction(action *storage.BucketLifecycleRuleAction) } } -func flattenBucketLifecycleRuleCondition(d *schema.ResourceData, condition *storage.BucketLifecycleRuleCondition) map[string]interface{} { +func flattenBucketLifecycleRuleCondition(index int, d *schema.ResourceData, condition *storage.BucketLifecycleRuleCondition) map[string]interface{} { ruleCondition := map[string]interface{}{ "created_before": condition.CreatedBefore, "matches_storage_class": 
tpgresource.ConvertStringArrToInterface(condition.MatchesStorageClass), @@ -1279,7 +1279,7 @@ func flattenBucketLifecycleRuleCondition(d *schema.ResourceData, condition *stor } } // setting no_age value from state config since it is terraform only variable and not getting value from backend. - if v, ok := d.GetOk("lifecycle_rule.0.condition"); ok{ + if v, ok := d.GetOk(fmt.Sprintf("lifecycle_rule.%d.condition",index)); ok{ state_condition := v.(*schema.Set).List()[0].(map[string]interface{}) ruleCondition["no_age"] = state_condition["no_age"].(bool) } diff --git a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.erb b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.erb index 7c686a2878cd..f62447cb399e 100644 --- a/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.erb +++ b/mmv1/third_party/terraform/services/storage/resource_storage_bucket_test.go.erb @@ -508,7 +508,7 @@ func TestAccStorageBucket_lifecycleRulesNoAge(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.no_age"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.1.condition.0.no_age"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycleNoAgeAndAge(bucketName), @@ -522,7 +522,7 @@ func TestAccStorageBucket_lifecycleRulesNoAge(t *testing.T) { ResourceName: "google_storage_bucket.bucket", ImportState: true, ImportStateVerify: true, - ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.0.condition.0.no_age"}, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.1.condition.0.no_age"}, }, { Config: testAccStorageBucket_customAttributes_withLifecycle1(bucketName), @@ -1477,8 +1477,8 @@ func testAccCheckStorageBucketLifecycleConditionState(expected *bool, b *storage func testAccCheckStorageBucketLifecycleConditionNoAge(expected *int64, 
b *storage.Bucket) resource.TestCheckFunc { return func(s *terraform.State) error { - actual := b.Lifecycle.Rule[0].Condition.Age - if expected == nil && b.Lifecycle.Rule[0].Condition.Age== nil { + actual := b.Lifecycle.Rule[1].Condition.Age + if expected == nil && b.Lifecycle.Rule[1].Condition.Age == nil { return nil } if expected == nil { @@ -1688,6 +1688,15 @@ resource "google_storage_bucket" "bucket" { name = "%s" location = "EU" force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + no_age = false + } + } lifecycle_rule { action { type = "Delete" @@ -1707,6 +1716,15 @@ resource "google_storage_bucket" "bucket" { name = "%s" location = "EU" force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + no_age = false + } + } lifecycle_rule { action { type = "Delete" diff --git a/mmv1/third_party/terraform/services/workstations/resource_workstations_workstation_config_test.go.erb b/mmv1/third_party/terraform/services/workstations/resource_workstations_workstation_config_test.go.erb index 72565a7f5219..e29069d6ab11 100644 --- a/mmv1/third_party/terraform/services/workstations/resource_workstations_workstation_config_test.go.erb +++ b/mmv1/third_party/terraform/services/workstations/resource_workstations_workstation_config_test.go.erb @@ -223,6 +223,184 @@ resource "google_workstations_workstation_config" "default" { `, context) } +func TestAccWorkstationsWorkstationConfig_ephemeralDirectories(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_ephemeralDirectories(context), + }, + { + ResourceName: 
"google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_ephemeralDirectories(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + ephemeral_directories { + mount_path = "/cache" + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot.id + read_only = true + } + } + + labels = { + foo = "bar" + } +} +`, context) +} + +func 
TestAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_image" "test_source_image" { + provider = google-beta + name = "tf-test-workstation-source-image%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + storage_locations = ["us-central1"] +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = 
google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + ephemeral_directories { + mount_path = "/cache" + gce_pd { + disk_type = "pd-standard" + source_image = google_compute_image.test_source_image.id + read_only = true + } + } + + labels = { + foo = "bar" + } +} +`, context) +} func TestAccWorkstationsWorkstationConfig_serviceAccount(t *testing.T) { t.Parallel() diff --git a/mmv1/third_party/terraform/website/docs/d/monitoring_app_engine_service.html.markdown b/mmv1/third_party/terraform/website/docs/d/monitoring_app_engine_service.html.markdown index 03525dcd6102..188d6e46e372 100644 --- a/mmv1/third_party/terraform/website/docs/d/monitoring_app_engine_service.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/monitoring_app_engine_service.html.markdown @@ -34,7 +34,7 @@ data "google_monitoring_app_engine_service" "srv" { resource "google_app_engine_standard_app_version" "myapp" { version_id = "v1" service = "myapp" - runtime = "nodejs10" + runtime = "nodejs20" entrypoint { shell = "node ./app.js" diff --git a/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown b/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown index a329a71258e7..8ecb74a85217 100644 --- a/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown @@ -1,10 +1,10 @@ --- -page_title: "Getting Started with the Google provider" +page_title: "Getting Started with the Google Cloud provider" description: |- - Getting started with the Google Cloud Platform provider + Getting started with the Google Cloud provider 
--- -# Getting Started with the Google Provider +# Getting Started with the Google Cloud provider ## Before you begin diff --git a/mmv1/third_party/terraform/website/docs/guides/provider_reference.html.markdown b/mmv1/third_party/terraform/website/docs/guides/provider_reference.html.markdown index 56642c7a8675..8b03ef435035 100644 --- a/mmv1/third_party/terraform/website/docs/guides/provider_reference.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/provider_reference.html.markdown @@ -1,7 +1,7 @@ --- -page_title: "Google Provider Configuration Reference" +page_title: "Google Cloud Provider Configuration Reference" description: |- - Configuration reference for the Google provider for Terraform. + Configuration reference for the Terraform provider for Google Cloud. --- # Google Provider Configuration Reference diff --git a/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown index e3eb664de0e6..f7a3fafb7a67 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_2_upgrade.html.markdown @@ -1,12 +1,12 @@ --- -page_title: "Terraform Google Provider 2.0.0 Upgrade Guide" +page_title: "Terraform provider for Google Cloud 2.0.0 Upgrade Guide" description: |- - Terraform Google Provider 2.0.0 Upgrade Guide + Terraform provider for Google Cloud 2.0.0 Upgrade Guide --- -# Terraform Google Provider 2.0.0 Upgrade Guide +# Terraform provider for Google Cloud 2.0.0 Upgrade Guide -Version `2.0.0` of the Google provider for Terraform is a major release and +Version `2.0.0` of the Terraform provider for Google Cloud is a major release and includes some changes that you will need to consider when upgrading. This guide is intended to help with that process and focuses only on the changes necessary to upgrade from version `1.20.0` to `2.0.0`. 
diff --git a/mmv1/third_party/terraform/website/docs/guides/version_3_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_3_upgrade.html.markdown index 06e5e64b36b4..305989cfae75 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_3_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_3_upgrade.html.markdown @@ -1,12 +1,12 @@ --- -page_title: "Terraform Google Provider 3.0.0 Upgrade Guide" +page_title: "Terraform provider for Google Cloud 3.0.0 Upgrade Guide" description: |- - Terraform Google Provider 3.0.0 Upgrade Guide + Terraform provider for Google Cloud 3.0.0 Upgrade Guide --- -# Terraform Google Provider 3.0.0 Upgrade Guide +# Terraform provider for Google Cloud 3.0.0 Upgrade Guide -The `3.0.0` release of the Google provider for Terraform is a major version and +The `3.0.0` release of the Terraform provider for Google Cloud is a major version and includes some changes that you will need to consider when upgrading. This guide is intended to help with that process and focuses only on the changes necessary to upgrade from the final `2.X` series release to `3.0.0`. 
diff --git a/mmv1/third_party/terraform/website/docs/guides/version_4_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_4_upgrade.html.markdown index bad7cca87838..b34c133d1062 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_4_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_4_upgrade.html.markdown @@ -1,12 +1,12 @@ --- -page_title: "Terraform Google Provider 4.0.0 Upgrade Guide" +page_title: "Terraform provider for Google Cloud 4.0.0 Upgrade Guide" description: |- - Terraform Google Provider 4.0.0 Upgrade Guide + Terraform provider for Google Cloud 4.0.0 Upgrade Guide --- -# Terraform Google Provider 4.0.0 Upgrade Guide +# Terraform provider for Google Cloud 4.0.0 Upgrade Guide -The `4.0.0` release of the Google provider for Terraform is a major version and +The `4.0.0` release of the Terraform provider for Google Cloud is a major version and includes some changes that you will need to consider when upgrading. This guide is intended to help with that process and focuses only on the changes necessary to upgrade from the final `3.X` series release to `4.0.0`. 
diff --git a/mmv1/third_party/terraform/website/docs/guides/version_5_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_5_upgrade.html.markdown index 1bd49e4d52c7..078ef0f0323a 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_5_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_5_upgrade.html.markdown @@ -1,12 +1,12 @@ --- -page_title: "Terraform Google Provider 5.0.0 Upgrade Guide" +page_title: "Terraform provider for Google Cloud 5.0.0 Upgrade Guide" description: |- - Terraform Google Provider 5.0.0 Upgrade Guide + Terraform provider for Google Cloud 5.0.0 Upgrade Guide --- -# Terraform Google Provider 5.0.0 Upgrade Guide +# Terraform provider for Google Cloud 5.0.0 Upgrade Guide -The `5.0.0` release of the Google provider for Terraform is a major version and +The `5.0.0` release of the Terraform provider for Google Cloud is a major version and includes some changes that you will need to consider when upgrading. This guide is intended to help with that process and focuses only on the changes necessary to upgrade from the final `4.X` series release to `5.0.0`. @@ -113,8 +113,8 @@ included in requests to the API. Replacing those labels' values with `_` or `true` are recommended. Not all of Google Cloud resources support labels and annotations. Please check -the Terraform Google provider resource documentation to figure out if a given -resource supports `labels` or `annotations` fields. +the resource documentation to figure out if a given resource supports `labels` +or `annotations` fields. #### Provider default labels @@ -188,7 +188,7 @@ Provider-level default annotations are not supported at this time. #### Resource labels -Previously, `labels` and `annotations` fields in the Terraform Google provider +Previously, `labels` and `annotations` fields in the Google Cloud provider were authoritative and Terraform thought it was the only owner of the fields. 
This model worked well initially, but with the introduction of system labels and other client-managed labels, Terraform would conflict with their labels and show diff --git a/mmv1/third_party/terraform/website/docs/index.html.markdown b/mmv1/third_party/terraform/website/docs/index.html.markdown index db9c0e41fbab..fee57ac9c426 100644 --- a/mmv1/third_party/terraform/website/docs/index.html.markdown +++ b/mmv1/third_party/terraform/website/docs/index.html.markdown @@ -1,12 +1,12 @@ --- -page_title: "Provider: Google Cloud Platform" +page_title: "Provider: Google Cloud" description: |- - The Google provider is used to configure your Google Cloud Platform infrastructure + The Terraform provider for Google Cloud is used to configure your Google Cloud infrastructure --- -# Google Cloud Platform Provider +# Terraform provider for Google Cloud -The Google provider is used to configure your [Google Cloud Platform](https://cloud.google.com/) infrastructure. +The Google Cloud provider is used to configure your [Google Cloud](https://cloud.google.com/) infrastructure. To learn the basics of Terraform using this provider, follow the hands-on [get started tutorials](https://developer.hashicorp.com/terraform/tutorials/gcp-get-started/infrastructure-as-code). @@ -14,7 +14,7 @@ For more involved examples, try [provisioning a GKE cluster](https://learn.hashi and deploying [Consul-backed Vault into it using Terraform Cloud](https://learn.hashicorp.com/tutorials/terraform/kubernetes-consul-vault-pipeline). Already experienced with Terraform? Check out the [Getting Started](/docs/providers/google/guides/getting_started.html) -page for a short introduction to using Terraform with Google Cloud Platform. +page for a short introduction to using Terraform with Google Cloud. ## Example Usage @@ -31,9 +31,9 @@ See the [provider reference](/docs/providers/google/guides/provider_reference.ht page for details on authentication and configuring the provider. 
Take advantage of [Modules](https://www.terraform.io/docs/modules/index.html) -to simplify your config by browsing the [Module Registry for GCP modules](https://registry.terraform.io/browse?provider=google). +to simplify your config by browsing the [Module Registry for Google Cloud modules](https://registry.terraform.io/browse?provider=google). -The Google provider is jointly maintained by: +The Google Cloud provider is jointly maintained by: * The [Terraform Team](https://cloud.google.com/docs/terraform) at Google * The Terraform team at [HashiCorp](https://www.hashicorp.com/) @@ -41,7 +41,7 @@ The Google provider is jointly maintained by: If you have configuration questions, or general questions about using the provider, try checking out: * [The Google category on discuss.hashicorp.com](https://discuss.hashicorp.com/c/terraform-providers/tf-google/32) -* The [Google Cloud Platform Community Slack](https://googlecloud-community.slack.com/) `#terraform` channel. If you are not registered with that Slack Workspace yet, the up-to-date **public sign-up link** can be found in the "Stay Connected" section of the [Google Developer Center](https://cloud.google.com/developers#stay-connected). +* The [Google Cloud Community Slack](https://googlecloud-community.slack.com/) `#terraform` channel. If you are not registered with that Slack Workspace yet, the up-to-date **public sign-up link** can be found in the "Stay Connected" section of the [Google Developer Center](https://cloud.google.com/developers#stay-connected). * [Terraform's community resources](https://www.terraform.io/docs/extend/community/index.html) * [HashiCorp support](https://support.hashicorp.com) for Terraform Enterprise customers @@ -53,22 +53,22 @@ and the [`google-beta` provider Releases](https://github.com/hashicorp/terraform for release notes and additional information. 
Per [Terraform Provider Versioning](https://www.hashicorp.com/blog/hashicorp-terraform-provider-versioning), -the Google provider follows [semantic versioning](https://semver.org/). +the Google Cloud provider follows [semantic versioning](https://semver.org/). In practice, patch / bugfix-only releases of the provider are infrequent. Most provider releases are either minor or major releases. ### Minor Releases -The Google provider currently aims to publish a minor release every week, +The Google Cloud provider currently aims to publish a minor release every week, although the timing of individual releases may differ if required by the provider team. ### Major Releases -The Google provider publishes major releases roughly yearly. An upgrade guide -will be published to help ease you through the transition between the prior -releases series and the new major release. +The Google Cloud provider publishes major releases roughly yearly. An upgrade +guide will be published to help ease you through the transition between the +prior releases series and the new major release. During major releases, all current deprecation warnings will be resolved, removing the field in question unless the deprecation warning message specifies @@ -80,7 +80,7 @@ lifecycle to give users plenty of time to safely update their configs. ## Features and Bug Requests -The Google provider's bugs and feature requests can be found in the [GitHub repo issues](https://github.com/hashicorp/terraform-provider-google/issues). +The Google Cloud provider's bugs and feature requests can be found in the [GitHub repo issues](https://github.com/hashicorp/terraform-provider-google/issues). Please avoid "me too" or "+1" comments. Instead, use a thumbs up [reaction](https://blog.github.com/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/) on enhancement requests. Provider maintainers will often prioritize work based on the number of thumbs on an issue. 
@@ -110,7 +110,7 @@ page for details on configuring the provider. ## Contributing -If you'd like to help extend the Google provider, we gladly accept community +If you'd like to help extend the Google Cloud provider, we gladly accept community contributions! Development on the providers is done through the [Magic Modules](https://github.com/GoogleCloudPlatform/magic-modules) repository. Our full contribution guide is available on the diff --git a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown index 8a8b26b87091..b24afb988530 100644 --- a/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/cloudfunctions_function.html.markdown @@ -149,6 +149,8 @@ Please refer to the field 'effective_labels' for all of the labels present on th * `build_environment_variables` - (Optional) A set of key/value environment variable pairs available during build time. +* `build_worker_pool` - (Optional) Name of the Cloud Build Custom Worker Pool that should be used to build the function. + * `vpc_connector` - (Optional) The VPC Network Connector that this cloud function can connect to. It should be set up as fully-qualified URI. The format of this field is `projects/*/locations/*/connectors/*`. * `vpc_connector_egress_settings` - (Optional) The egress settings for the connector, controlling what traffic is diverted through it. Allowed values are `ALL_TRAFFIC` and `PRIVATE_RANGES_ONLY`. Defaults to `PRIVATE_RANGES_ONLY`. If unset, this field preserves the previously set value. @@ -160,9 +162,9 @@ Please refer to the field 'effective_labels' for all of the labels present on th * `source_repository` - (Optional) Represents parameters related to source repository where a function is hosted. Cannot be set alongside `source_archive_bucket` or `source_archive_object`. 
Structure is [documented below](#nested_source_repository). It must match the pattern `projects/{project}/locations/{location}/repositories/{repository}`.* -* `docker_registry` - (Optional) Docker Registry to use for storing the function's Docker images. Allowed values are CONTAINER_REGISTRY (default) and ARTIFACT_REGISTRY. +* `docker_registry` - (Optional) Docker Registry to use for storing the function's Docker images. Allowed values are ARTIFACT_REGISTRY (default) and CONTAINER_REGISTRY. -* `docker_repository` - (Optional) User managed repository created in Artifact Registry optionally with a customer managed encryption key. If specified, deployments will use Artifact Registry. This is the repository to which the function docker image will be pushed after it is built by Cloud Build. If unspecified, Container Registry will be used by default, unless specified otherwise by other means. +* `docker_repository` - (Optional) User-managed repository created in Artifact Registry to which the function's Docker image will be pushed after it is built by Cloud Build. May optionally be encrypted with a customer-managed encryption key (CMEK). If unspecified and `docker_registry` is not explicitly set to `CONTAINER_REGISTRY`, GCF will create and use a default Artifact Registry repository named 'gcf-artifacts' in the region. * `kms_key_name` - (Optional) Resource name of a KMS crypto key (managed by the user) used to encrypt/decrypt function resources. It must match the pattern `projects/{project}/locations/{location}/keyRings/{key_ring}/cryptoKeys/{crypto_key}`. If specified, you must also provide an artifact registry repository using the `docker_repository` field that was created with the same KMS crypto key. 
Before deploying, please complete all pre-requisites described in https://cloud.google.com/functions/docs/securing/cmek#granting_service_accounts_access_to_the_key diff --git a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown index 61e961b78b54..81865ec37908 100644 --- a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown @@ -229,7 +229,7 @@ The following arguments are supported: * `config` - (Optional) - Configuration parameters for this environment Structure is [documented below](#nested_config). + Configuration parameters for this environment Structure is [documented below](#nested_config_c1). * `labels` - (Optional) @@ -260,7 +260,7 @@ The following arguments are supported: (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. -The `config` block supports: +The `config` block supports: * `node_count` - (Optional, Cloud Composer 1 only) @@ -268,28 +268,19 @@ The following arguments are supported: * `node_config` - (Optional) - The configuration used for the Kubernetes Engine cluster. Structure is [documented below](#nested_node_config). + The configuration used for the Kubernetes Engine cluster. Structure is [documented below](#nested_node_config_c1). * `recovery_config` - (Optional, Cloud Composer 2 only) - The configuration settings for recovery. Structure is [documented below](#nested_recovery_config). + The configuration settings for recovery. Structure is [documented below](#nested_recovery_config_c1). * `software_config` - (Optional) - The configuration settings for software inside the environment. Structure is [documented below](#nested_software_config). + The configuration settings for software inside the environment. Structure is [documented below](#nested_software_config_c1). 
* `private_environment_config` - (Optional) - The configuration used for the Private IP Cloud Composer environment. Structure is [documented below](#nested_private_environment_config). - -* `enable_private_environment` - - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) - If true, a private Composer environment will be created. - -* `enable_private_builds_only` - - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) - If true, builds performed during operations that install Python packages have only private connectivity to Google services. - If false, the builds also have access to the internet. + The configuration used for the Private IP Cloud Composer environment. Structure is [documented below](#nested_private_environment_config_c1). * `web_server_network_access_control` - The network-level access control policy for the Airflow web server. @@ -319,9 +310,9 @@ The following arguments are supported: master authorized networks will disallow all external traffic to access Kubernetes master through HTTPS except traffic from the given CIDR blocks, Google Compute Engine Public IPs and Google Prod IPs. Structure is - [documented below](#nested_master_authorized_networks_config). + [documented below](#nested_master_authorized_networks_config_c1). -The `node_config` block supports: +The `node_config` block supports: * `zone` - (Optional, Cloud Composer 1 only) @@ -382,7 +373,7 @@ The following arguments are supported: * `ip_allocation_policy` - (Optional) Configuration for controlling how IPs are allocated in the GKE cluster. - Structure is [documented below](#nested_ip_allocation_policy). + Structure is [documented below](#nested_ip_allocation_policy_c1). Cannot be updated. * `max_pods_per_node` - @@ -401,7 +392,7 @@ The following arguments are supported: all destination addresses, except between pods traffic. 
See the [documentation](https://cloud.google.com/composer/docs/enable-ip-masquerade-agent). -The `software_config` block supports: +The `software_config` block supports: * `airflow_config_overrides` - (Optional) Apache Airflow configuration properties to override. Property keys contain the section and property names, @@ -444,7 +435,7 @@ The following arguments are supported: ``` * `image_version` - - (Optional in Cloud Composer 1, required in Cloud Composer 2) +(Required) In Composer 1, use a specific Composer 1 version in this parameter. If omitted, the default is the latest version of Composer 2. The version of the software running in the environment. This encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. It must match the regular expression @@ -465,12 +456,8 @@ The following arguments are supported: (Optional, Cloud Composer 1 with Airflow 2 only) The number of schedulers for Airflow. -* `web_server_plugins_mode` - - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) - Web server plugins configuration. Should be either 'ENABLED' or 'DISABLED'. Defaults to 'ENABLED'. - -See [documentation](https://cloud.google.com/composer/docs/how-to/managing/configuring-private-ip) for setting up private environments. The `private_environment_config` block supports: +See [documentation](https://cloud.google.com/composer/docs/how-to/managing/configuring-private-ip) for setting up private environments. The `private_environment_config` block supports: * `connection_type` - (Optional, Cloud Composer 2 only) @@ -506,9 +493,9 @@ See [documentation](https://cloud.google.com/composer/docs/how-to/managing/confi The `web_server_network_access_control` supports: * `allowed_ip_range` - - A collection of allowed IP ranges with descriptions. Structure is [documented below](#nested_allowed_ip_range). + A collection of allowed IP ranges with descriptions. 
Structure is [documented below](#nested_allowed_ip_range_c1). -The `allowed_ip_range` supports: +The `allowed_ip_range` supports: * `value` - (Required) @@ -521,7 +508,7 @@ The `web_server_network_access_control` supports: (Optional) A description of this ip range. -The `ip_allocation_policy` block supports: +The `ip_allocation_policy` block supports: * `use_ip_aliases` - (Optional, Cloud Composer 1 only) @@ -560,7 +547,7 @@ The `web_server_network_access_control` supports: (e.g. 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) to pick a specific range to use. Specify either `services_secondary_range_name` or `services_ipv4_cidr_block` but not both. -The `database_config` block supports: +The `database_config` block supports: * `machine_type` - (Optional) @@ -571,7 +558,7 @@ The `web_server_network_access_control` supports: (Optional) Preferred Cloud SQL database zone. -The `web_server_config` block supports: +The `web_server_config` block supports: * `machine_type` - (Required) @@ -580,7 +567,7 @@ The `web_server_network_access_control` supports: Value custom is returned only in response, if Airflow web server parameters were manually changed to a non-standard values. -The `encryption_config` block supports: +The `encryption_config` block supports: * `kms_key_name` - (Required) @@ -588,7 +575,7 @@ The `web_server_network_access_control` supports: be the fully qualified resource name, i.e. projects/project-id/locations/location/keyRings/keyring/cryptoKeys/key. Cannot be updated. -The `maintenance_window` block supports: +The `maintenance_window` block supports: * `start_time` - (Required) Start time of the first recurrence of the maintenance window. @@ -604,15 +591,15 @@ The `web_server_network_access_control` supports: The only allowed values for 'FREQ' field are 'FREQ=DAILY' and 'FREQ=WEEKLY;BYDAY=...'. Example values: 'FREQ=WEEKLY;BYDAY=TU,WE', 'FREQ=DAILY'. 
-The `master_authorized_networks_config` block supports: +The `master_authorized_networks_config` block supports: * `enabled` - (Required) Whether or not master authorized networks is enabled. * `cidr_blocks` - - `cidr_blocks `define up to 50 external networks that could access Kubernetes master through HTTPS. Structure is [documented below](#nested_cidr_blocks). + `cidr_blocks `define up to 50 external networks that could access Kubernetes master through HTTPS. Structure is [documented below](#nested_cidr_blocks_c1). -The `cidr_blocks` supports: +The `cidr_blocks` supports: * `display_name` - (Optional) @@ -632,7 +619,7 @@ The following arguments are supported: * `config` - (Optional) - Configuration parameters for this environment. Structure is documented below. + Configuration parameters for this environment. Structure is [documented below](#nested_config_c2). * `labels` - (Optional) @@ -656,24 +643,23 @@ The following arguments are supported: * `storage_config` - (Optional) - Configuration options for storage used by Composer environment. Structure is documented below. + Configuration options for storage used by Composer environment. Structure is [documented below](#nested_storage_config_c2). -The `config` block supports: +The `config` block supports: * `node_config` - (Optional) - The configuration used for the Kubernetes Engine cluster. Structure is documented below. + The configuration used for the Kubernetes Engine cluster. Structure is [documented below](#nested_node_config_c2). * `software_config` - (Optional) The configuration settings for software (Airflow) inside the environment. Structure is - documented below. + [documented below](#nested_software_config_c2). * `private_environment_config` - (Optional) - The configuration used for the Private IP Cloud Composer environment. Structure is documented - below. + The configuration used for the Private IP Cloud Composer environment. Structure is [documented below](#nested_private_environment_config_c2). 
* `encryption_config` - (Optional) @@ -685,12 +671,12 @@ The `config` block supports: The configuration settings for Cloud Composer maintenance windows. * `workloads_config` - - (Optional, Cloud Composer 2 only) + (Optional) The Kubernetes workloads configuration for GKE cluster associated with the Cloud Composer environment. * `environment_size` - - (Optional, Cloud Composer 2 only) + (Optional) The environment size controls the performance parameters of the managed Cloud Composer infrastructure that includes the Airflow database. Values for environment size are `ENVIRONMENT_SIZE_SMALL`, `ENVIRONMENT_SIZE_MEDIUM`, @@ -709,20 +695,20 @@ The `config` block supports: master authorized networks will disallow all external traffic to access Kubernetes master through HTTPS except traffic from the given CIDR blocks, Google Compute Engine Public IPs and Google Prod IPs. Structure is - documented below. + [documented below](#nested_master_authorized_networks_config_c1). * `data_retention_config` - (Optional, Cloud Composer 2.0.23 or newer only) Configuration setting for airflow data rentention mechanism. Structure is - [documented below](#nested_data_retention_config). + [documented below](#nested_data_retention_config_c2). -The `data_retention_config` block supports: +The `data_retention_config` block supports: * `task_logs_retention_config` - (Optional) The configuration setting for Task Logs. Structure is - [documented below](#nested_task_logs_retention_config). + [documented below](#nested_task_logs_retention_config_c2). -The `task_logs_retention_config` block supports: +The `task_logs_retention_config` block supports: * `storage_mode` - (Optional) The mode of storage for Airflow workers task logs. Values for storage mode are @@ -730,14 +716,14 @@ The `config` block supports: `CLOUD_LOGGING_AND_CLOUD_STORAGE` to store logs in cloud logging and cloud storage. 
-The `storage_config` block supports: +The `storage_config` block supports: * `bucket` - (Required) Name of an existing Cloud Storage bucket to be used by the environment. -The `node_config` block supports: +The `node_config` block supports: * `network` - (Optional) @@ -773,7 +759,7 @@ The `node_config` block supports: * `ip_allocation_policy` - (Optional) Configuration for controlling how IPs are allocated in the GKE cluster. - Structure is documented below. + Structure is [documented below](#nested_ip_allocation_policy_c2). Cannot be updated. * `enable_ip_masq_agent` - @@ -783,12 +769,7 @@ The `node_config` block supports: packets from node IP addresses instead of Pod IP addresses See the [documentation](https://cloud.google.com/composer/docs/enable-ip-masquerade-agent). -* `composer_internal_ipv4_cidr_block` - - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) - At least /20 IPv4 cidr range that will be used by Composer internal components. - Cannot be updated. - -The `software_config` block supports: +The `software_config` block supports: * `airflow_config_overrides` - (Optional) Apache Airflow configuration properties to override. Property keys contain the section and property names, @@ -831,10 +812,8 @@ The `software_config` block supports: ``` * `image_version` - - (Required in Cloud Composer 2, optional in Cloud Composer 1) +(Optional) If omitted, the default is the latest version of Composer 2. - **In Cloud Composer 2, you must specify an image with Cloud Composer 2**. Otherwise, the default image for Cloud Composer 1 is used. For more information about Cloud Composer images, see - [Cloud Composer version list](https://cloud.google.com/composer/docs/concepts/versioning/composer-versions). The version of the software running in the environment. This encapsulates both the version of Cloud Composer functionality and the version of Apache Airflow. 
It must match the regular expression @@ -853,14 +832,14 @@ The `software_config` block supports: (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer environments in versions composer-2.1.2-airflow-*.*.* and newer) The configuration for Cloud Data Lineage integration. Structure is - [documented below](#nested_cloud_data_lineage_integration). + [documented below](#nested_cloud_data_lineage_integration_c2). -The `cloud_data_lineage_integration` block supports: +The `cloud_data_lineage_integration` block supports: * `enabled` - (Required) Whether or not Cloud Data Lineage integration is enabled. -See [documentation](https://cloud.google.com/composer/docs/how-to/managing/configuring-private-ip) for setting up private environments. The `private_environment_config` block supports: +See [documentation](https://cloud.google.com/composer/docs/how-to/managing/configuring-private-ip) for setting up private environments. The `private_environment_config` block supports: * `enable_private_endpoint` - If true, access to the public endpoint of the GKE cluster is denied. @@ -894,7 +873,7 @@ See [documentation](https://cloud.google.com/composer/docs/how-to/managing/confi versions `composer-2.*.*-airflow-*.*.*` and newer. -The `ip_allocation_policy` block supports: +The `ip_allocation_policy` block supports: * `cluster_secondary_range_name` - (Optional) @@ -951,7 +930,7 @@ The `ip_allocation_policy` block supports: The only allowed values for 'FREQ' field are 'FREQ=DAILY' and 'FREQ=WEEKLY;BYDAY=...'. Example values: 'FREQ=WEEKLY;BYDAY=TU,WE', 'FREQ=DAILY'. -The `recovery_config` block supports: +The `recovery_config` block supports: * `scheduled_snapshots_config` - (Optional) @@ -993,6 +972,345 @@ The `workloads_config` block supports: (Optional) Configuration for resources used by Airflow workers. +The `scheduler` block supports: + +* `cpu` - + (Optional) + The number of CPUs for a single Airflow scheduler. 
+ +* `memory_gb` - + (Optional) + The amount of memory (GB) for a single Airflow scheduler. + +* `storage_gb` - + (Optional) + The amount of storage (GB) for a single Airflow scheduler. + +* `count` - + (Optional) + The number of schedulers. + +The `triggerer` block supports: + +* `cpu` - + (Required) + The number of CPUs for a single Airflow triggerer. + +* `memory_gb` - + (Required) + The amount of memory (GB) for a single Airflow triggerer. + +* `count` - + (Required) + The number of Airflow triggerers. + +The `web_server` block supports: + +* `cpu` - + (Optional) + The number of CPUs for the Airflow web server. + +* `memory_gb` - + (Optional) + The amount of memory (GB) for the Airflow web server. + +* `storage_gb` - + (Optional) + The amount of storage (GB) for the Airflow web server. + +The `worker` block supports: + +* `cpu` - + (Optional) + The number of CPUs for a single Airflow worker. + +* `memory_gb` - + (Optional) + The amount of memory (GB) for a single Airflow worker. + +* `storage_gb` + (Optional) + The amount of storage (GB) for a single Airflow worker. + +* `min_count` - + (Optional) + The minimum number of Airflow workers that the environment can run. The number of workers in the + environment does not go above this number, even if a lower number of workers can handle the load. + +* `max_count` - + (Optional) + The maximum number of Airflow workers that the environment can run. The number of workers in the + environment does not go above this number, even if a higher number of workers is required to + handle the load. + + +## Argument Reference - Cloud Composer 3 + +**Please note: This documentation corresponds to Composer 3, which is not yet released.** + +The following arguments are supported: + +* `name` - + (Required) + Name of the environment + +* `config` - + (Optional) + Configuration parameters for this environment. Structure is [documented below](#nested_config_c3). + +* `labels` - + (Optional) + User-defined labels for this environment. 
The labels map can contain + no more than 64 entries. Entries of the labels map are UTF8 strings + that comply with the following restrictions: + Label keys must be between 1 and 63 characters long and must conform + to the following regular expression: `[a-z]([-a-z0-9]*[a-z0-9])?`. + Label values must be between 0 and 63 characters long and must + conform to the regular expression `([a-z]([-a-z0-9]*[a-z0-9])?)?`. + No more than 64 labels can be associated with a given environment. + Both keys and values must be <= 128 bytes in size. + +* `region` - + (Optional) + The location or Compute Engine region for the environment. + +* `project` - + (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + +* `storage_config` - + (Optional) + Configuration options for storage used by Composer environment. Structure is [documented below](#nested_storage_config_c3). + + +The `config` block supports: + +* `node_config` - + (Optional) + The configuration used for the Kubernetes Engine cluster. Structure is [documented below](#nested_node_config_c3). + +* `software_config` - + (Optional) + The configuration settings for software (Airflow) inside the environment. Structure is [documented below](#nested_software_config_c3). + +* `enable_private_environment` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) + If true, a private Composer environment will be created. + +* `enable_private_builds_only` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) + If true, builds performed during operations that install Python packages have only private connectivity to Google services. + If false, the builds also have access to the internet. + +* `encryption_config` - + (Optional) + The encryption options for the Cloud Composer environment and its + dependencies. 
+ +* `maintenance_window` - + (Optional) + The configuration settings for Cloud Composer maintenance windows. + +* `workloads_config` - + (Optional) + The Kubernetes workloads configuration for GKE cluster associated with the + Cloud Composer environment. + +* `environment_size` - + (Optional) + The environment size controls the performance parameters of the managed + Cloud Composer infrastructure that includes the Airflow database. Values for + environment size are `ENVIRONMENT_SIZE_SMALL`, `ENVIRONMENT_SIZE_MEDIUM`, + and `ENVIRONMENT_SIZE_LARGE`. + +* `data_retention_config` - + (Optional, Cloud Composer 2.0.23 or later only) + Configuration setting for Airflow database retention mechanism. Structure is + [documented below](#nested_data_retention_config_c3). + +The `data_retention_config` block supports: +* `task_logs_retention_config` - + (Optional) + The configuration setting for Airflow task logs. Structure is + [documented below](#nested_task_logs_retention_config_c3). + +The `task_logs_retention_config` block supports: +* `storage_mode` - + (Optional) + The mode of storage for Airflow task logs. Values for storage mode are + `CLOUD_LOGGING_ONLY` to only store logs in cloud logging and + `CLOUD_LOGGING_AND_CLOUD_STORAGE` to store logs in cloud logging and cloud storage. + + +The `storage_config` block supports: + +* `bucket` - + (Required) + Name of an existing Cloud Storage bucket to be used by the environment. + + +The `node_config` block supports: + +* `network` - + (Optional) + The Compute Engine network to be used for machine + communications, specified as a self-link, relative resource name + (for example "projects/{project}/global/networks/{network}"), by name. + + The network must belong to the environment's project. If unspecified, the "default" network ID in the environment's + project is used. If a Custom Subnet Network is provided, subnetwork must also be provided. 
+ +* `subnetwork` - + (Optional) + The Compute Engine subnetwork to be used for machine + communications, specified as a self-link, relative resource name (for example, + "projects/{project}/regions/{region}/subnetworks/{subnetwork}"), or by name. If subnetwork is provided, + network must also be provided and the subnetwork must belong to the enclosing environment's project and region. + +* `composer_network_attachment` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) + PSC (Private Service Connect) Network entry point. Customers can pre-create the Network Attachment + and point Cloud Composer environment to use. It is possible to share network attachment among many environments, + provided enough IP addresses are available. + +* `service_account` - + (Optional) + The Google Cloud Platform Service Account to be used by the + node VMs. If a service account is not specified, the "default" + Compute Engine service account is used. Cannot be updated. If given, + note that the service account must have `roles/composer.worker` + for any GCP resources created under the Cloud Composer Environment. + +* `tags` - + (Optional) + The list of instance tags applied to all node VMs. Tags are + used to identify valid sources or targets for network + firewalls. Each tag within the list must comply with RFC1035. + Cannot be updated. + +* `composer_internal_ipv4_cidr_block` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) + /20 IPv4 cidr range that will be used by Composer internal components. + Cannot be updated. + +The `software_config` block supports: + +* `airflow_config_overrides` - + (Optional) Apache Airflow configuration properties to override. Property keys contain the section and property names, + separated by a hyphen, for example "core-dags_are_paused_at_creation". 
+ + Section names must not contain hyphens ("-"), opening square brackets ("["), or closing square brackets ("]"). + The property name must not be empty and cannot contain "=" or ";". Section and property names cannot contain + characters: "." Apache Airflow configuration property names must be written in snake_case. Property values can + contain any character, and can be written in any lower/upper case format. Certain Apache Airflow configuration + property values are [blacklisted](https://cloud.google.com/composer/docs/concepts/airflow-configurations#airflow_configuration_blacklists), + and cannot be overridden. + +* `pypi_packages` - + (Optional) + Custom Python Package Index (PyPI) packages to be installed + in the environment. Keys refer to the lowercase package name (e.g. "numpy"). Values are the lowercase extras and + version specifier (e.g. "==1.12.0", "[devel,gcp_api]", "[devel]>=1.8.2, <1.9.2"). To specify a package without + pinning it to a version specifier, use the empty string as the value. + +* `env_variables` - + (Optional) + Additional environment variables to provide to the Apache Airflow scheduler, worker, and webserver processes. + Environment variable names must match the regular expression `[a-zA-Z_][a-zA-Z0-9_]*`. + They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression + `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the following reserved names: + ``` + AIRFLOW_HOME + C_FORCE_ROOT + CONTAINER_NAME + DAGS_FOLDER + GCP_PROJECT + GCS_BUCKET + GKE_CLUSTER_NAME + SQL_DATABASE + SQL_INSTANCE + SQL_PASSWORD + SQL_PROJECT + SQL_REGION + SQL_USER + ``` + +* `image_version` - + (Required) If omitted, the default is the latest version of Composer 2. + + In Cloud Composer 3, you can only specify 3 in the Cloud Composer portion of the image version. Example: composer-3-airflow-x.y.z-build.t. 
+ + The Apache Airflow portion of the image version is a full semantic version that points to one of the + supported Apache Airflow versions, or an alias in the form of only major, major.minor or major.minor.patch versions specified. + Like in Composer 1 and 2, a given Airflow version is released multiple times in Composer, with different patches + and versions of dependencies. To distinguish between these versions in Composer 3, you can optionally specify a + build number to pin to a specific Airflow release. + Example: composer-3-airflow-2.6.3-build.4. + + The image version in Composer 3 must match the regular expression: + `composer-(([0-9]+)(\.[0-9]+\.[0-9]+(-preview\.[0-9]+)?)?|latest)-airflow-(([0-9]+)((\.[0-9]+)(\.[0-9]+)?)?(-build\.[0-9]+)?)` + Example: composer-3-airflow-2.6.3-build.4 + + **Important**: In-place upgrade for Composer 3 is not yet supported. + +* `cloud_data_lineage_integration` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), + Cloud Composer environments in versions composer-2.1.2-airflow-*.*.* and later) + The configuration for Cloud Data Lineage integration. Structure is + [documented below](#nested_cloud_data_lineage_integration_c3). + +* `web_server_plugins_mode` - + (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) + Web server plugins configuration. Can be either 'ENABLED' or 'DISABLED'. Defaults to 'ENABLED'. + +The `cloud_data_lineage_integration` block supports: +* `enabled` - + (Required) + Whether or not Cloud Data Lineage integration is enabled. + +The `encryption_config` block supports: + +* `kms_key_name` - + (Required) + Customer-managed Encryption Key available through Google's Key Management Service. It must + be the fully qualified resource name, + i.e. projects/project-id/locations/location/keyRings/keyring/cryptoKeys/key. Cannot be updated. 
+ +The `maintenance_window` block supports: + +* `start_time` - + (Required) + Start time of the first recurrence of the maintenance window. + +* `end_time` - + (Required) + Maintenance window end time. It is used only to calculate the duration of the maintenance window. + The value for end-time must be in the future, relative to 'start_time'. + +* `recurrence` - + (Required) + Maintenance window recurrence. Format is a subset of RFC-5545 (https://tools.ietf.org/html/rfc5545) 'RRULE'. + The only allowed values for 'FREQ' field are 'FREQ=DAILY' and 'FREQ=WEEKLY;BYDAY=...'. + Example values: 'FREQ=WEEKLY;BYDAY=TU,WE', 'FREQ=DAILY'. + +The `workloads_config` block supports: + +* `scheduler` - + (Optional) + Configuration for resources used by Airflow scheduler. + +* `triggerer` - + (Optional) + Configuration for resources used by Airflow triggerer. + +* `web_server` - + (Optional) + Configuration for resources used by Airflow web server. + +* `worker` - + (Optional) + Configuration for resources used by Airflow workers. + * `dag_processor` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html), Cloud Composer 3 only) Configuration for resources used by DAG processor. diff --git a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown index f6dead3b4382..834633b6b6d9 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_instance_group_manager.html.markdown @@ -153,7 +153,7 @@ The following arguments are supported: * `auto_healing_policies` - (Optional) The autohealing policies for this managed instance group. You can specify only one value. Structure is [documented below](#nested_auto_healing_policies). 
For more information, see the [official documentation](https://cloud.google.com/compute/docs/instance-groups/creating-groups-of-managed-instances#monitoring_groups). -* `all_instances_config` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) +* `all_instances_config` - (Optional) Properties to set on all instances in the group. After setting allInstancesConfig on the group, you must update the group's instances to apply the configuration. @@ -205,12 +205,13 @@ update_policy { ```hcl instance_lifecycle_policy { - force_update_on_repair = "YES" + force_update_on_repair = "YES" + default_action_on_failure = "DO_NOTHING" } ``` -* `force_update_on_repair` - (Optional, (https://terraform.io/docs/providers/google/guides/provider_versions.html)), Specifies whether to apply the group's latest configuration when repairing a VM. Valid options are: `YES`, `NO`. If `YES` and you updated the group's instance template or per-instance configurations after the VM was created, then these changes are applied when VM is repaired. If `NO` (default), then updates are applied in accordance with the group's update policy type. - +* `force_update_on_repair` - (Optional), Specifies whether to apply the group's latest configuration when repairing a VM. Valid options are: `YES`, `NO`. If `YES` and you updated the group's instance template or per-instance configurations after the VM was created, then these changes are applied when VM is repaired. If `NO` (default), then updates are applied in accordance with the group's update policy type. +* `default_action_on_failure` - (Optional), Default behavior for all instance or health check failures. Valid options are: `REPAIR`, `DO_NOTHING`. If `DO_NOTHING` then instances will not be repaired. If `REPAIR` (default), then failed instances will be repaired. 
- - - The `all_instances_config` block supports: @@ -226,9 +227,9 @@ all_instances_config { } ``` -* `metadata` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)), The metadata key-value pairs that you want to patch onto the instance. For more information, see [Project and instance metadata](https://cloud.google.com/compute/docs/metadata#project_and_instance_metadata). +* `metadata` - (Optional), The metadata key-value pairs that you want to patch onto the instance. For more information, see [Project and instance metadata](https://cloud.google.com/compute/docs/metadata#project_and_instance_metadata). -* `labels` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)), The label key-value pairs that you want to patch onto the instance. +* `labels` - (Optional), The label key-value pairs that you want to patch onto the instance. - - - diff --git a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_group_manager.html.markdown b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_group_manager.html.markdown index 0a16ac30b14f..8dfbf3ee9a9f 100644 --- a/mmv1/third_party/terraform/website/docs/r/compute_region_instance_group_manager.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/compute_region_instance_group_manager.html.markdown @@ -155,7 +155,7 @@ The following arguments are supported: * `auto_healing_policies` - (Optional) The autohealing policies for this managed instance group. You can specify only one value. Structure is documented below. For more information, see the [official documentation](https://cloud.google.com/compute/docs/instance-groups/creating-groups-of-managed-instances#monitoring_groups). -* `all_instances_config` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) +* `all_instances_config` - (Optional) Properties to set on all instances in the group. 
After setting allInstancesConfig on the group, you must update the group's instances to apply the configuration. @@ -215,11 +215,14 @@ update_policy { ```hcl instance_lifecycle_policy { - force_update_on_repair = "YES" + force_update_on_repair = "YES" + default_action_on_failure = "DO_NOTHING" } ``` -* `force_update_on_repair` - (Optional, (https://terraform.io/docs/providers/google/guides/provider_versions.html)), Specifies whether to apply the group's latest configuration when repairing a VM. Valid options are: YES, NO. If YES and you updated the group's instance template or per-instance configurations after the VM was created, then these changes are applied when VM is repaired. If NO (default), then updates are applied in accordance with the group's update policy type. +* `force_update_on_repair` - (Optional), Specifies whether to apply the group's latest configuration when repairing a VM. Valid options are: `YES`, `NO`. If `YES` and you updated the group's instance template or per-instance configurations after the VM was created, then these changes are applied when VM is repaired. If `NO` (default), then updates are applied in accordance with the group's update policy type. +* `default_action_on_failure` - (Optional), Default behavior for all instance or health check failures. Valid options are: `REPAIR`, `DO_NOTHING`. If `DO_NOTHING` then instances will not be repaired. If `REPAIR` (default), then failed instances will be repaired. + - - - The `all_instances_config` block supports: @@ -235,9 +238,9 @@ all_instances_config { } ``` -* `metadata` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)), The metadata key-value pairs that you want to patch onto the instance. For more information, see [Project and instance metadata](https://cloud.google.com/compute/docs/metadata#project_and_instance_metadata). +* `metadata` - (Optional), The metadata key-value pairs that you want to patch onto the instance. 
For more information, see [Project and instance metadata](https://cloud.google.com/compute/docs/metadata#project_and_instance_metadata). -* `labels` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)), The label key-value pairs that you want to patch onto the instance. +* `labels` - (Optional), The label key-value pairs that you want to patch onto the instance. - - - diff --git a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown index 3c72beba2d55..983dd05befe4 100644 --- a/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/container_cluster.html.markdown @@ -790,8 +790,7 @@ The `master_authorized_networks_config.cidr_blocks` block supports: * `disk_type` - (Optional) Type of the disk attached to each node (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard' -* `enable_confidential_storage` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) -Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. +* `enable_confidential_storage` - (Optional) Enabling Confidential Storage will create boot disk with confidential mode. It is disabled by default. * `ephemeral_storage_config` - (Optional, [Beta](https://terraform.io/docs/providers/google/guides/provider_versions.html)) Parameters for the ephemeral storage filesystem. If unspecified, ephemeral storage is backed by the boot disk. Structure is [documented below](#nested_ephemeral_storage_config). 
diff --git a/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown b/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown index d592b3fdb39c..992c142e8e5d 100644 --- a/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/dataflow_flex_template_job.html.markdown @@ -118,6 +118,10 @@ provided, the provider project is used. * `region` - (Optional) The region in which the created job should run. +* `service_account_email` - (Optional) Service account email to run the workers as. + +* `subnetwork` - (Optional) Compute Engine subnetwork for launching instances to run your pipeline. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are exported: diff --git a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown index ed571b6d7bdd..bd79aaa35da8 100644 --- a/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/gke_hub_feature_membership.html.markdown @@ -504,6 +504,10 @@ The following arguments are supported: (Optional) The maximum number of audit violations to be stored in a constraint. If not set, the default of 20 will be used. + * `deployment_configs` - + (Optional) + Map of deployment configs to deployments ("admission", "audit", "mutation"). + * `policy_content` - (Optional) Specifies the desired policy content on the cluster. Structure is [documented below](#nested_policy_content). @@ -514,12 +518,97 @@ The following arguments are supported: (Optional) Specifies the list of backends Policy Controller will export to. Must be one of `CLOUD_MONITORING` or `PROMETHEUS`. Defaults to [`CLOUD_MONITORING`, `PROMETHEUS`]. Specifying an empty value `[]` disables metrics export. 
+The `deployment_configs` block supports: + +* `component_name` - + (Required) + The name of the component. One of `admission` `audit` or `mutation` + +* `container_resources` - + (Optional) + Container resource requirements. + +* `pod_affinity` - + (Optional) + Pod affinity configuration. Possible values: AFFINITY_UNSPECIFIED, NO_AFFINITY, ANTI_AFFINITY + +* `pod_tolerations` - + (Optional) + Pod tolerations of node taints. + +* `replica_count` - + (Optional) + Pod replica count. + +The `container_resources` block supports: + +* `limits` - + (Optional) + Limits describes the maximum amount of compute resources allowed for use by the running container. + +* `requests` - + (Optional) + Requests describes the amount of compute resources reserved for the container by the kube-scheduler. + +The `limits` block supports: + +* `cpu` - + (Optional) + CPU requirement expressed in Kubernetes resource units. + +* `memory` - + (Optional) + Memory requirement expressed in Kubernetes resource units. + +The `requests` block supports: + +* `cpu` - + (Optional) + CPU requirement expressed in Kubernetes resource units. + +* `memory` - + (Optional) + Memory requirement expressed in Kubernetes resource units. + +The `pod_tolerations` block supports: + +* `effect` - + (Optional) + Matches a taint effect. + +* `key` - + (Optional) + Matches a taint key (not necessarily unique). + +* `operator` - + (Optional) + Matches a taint operator. + +* `value` - + (Optional) + Matches a taint value. + The `policy_content` block supports: +* `bundles` - + (Optional) + map of bundle name to BundleInstallSpec. The bundle name maps to the `bundleName` key in the `policycontroller.gke.io/constraintData` annotation on a constraint. + * `template_library` (Optional) Configures the installation of the Template Library. Structure is [documented below](#nested_template_library). +The `template_library` block supports: +The `bundles` block supports: + +* `bundle_name` - + (Required) + The name of the bundle. 
+ +* `exempted_namespaces` - + (Optional) + The set of namespaces to be exempted from the bundle. + The `template_library` block supports: * `installation` diff --git a/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown index 419315c503d7..33ee748e341d 100644 --- a/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/google_project_iam.html.markdown @@ -228,7 +228,7 @@ An [`import` block](https://developer.hashicorp.com/terraform/language/import) ( ```tf import { - id = ""{{project_id}} roles/viewer user:foo@example.com"m" + id = "{{project_id}} roles/viewer user:foo@example.com" to = google_project_iam_member.default } ``` diff --git a/mmv1/third_party/terraform/website/docs/r/spanner_database_iam.html.markdown b/mmv1/third_party/terraform/website/docs/r/spanner_database_iam.html.markdown index 1de101713eb3..ef09772c1fd0 100644 --- a/mmv1/third_party/terraform/website/docs/r/spanner_database_iam.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/spanner_database_iam.html.markdown @@ -39,6 +39,32 @@ resource "google_spanner_database_iam_policy" "database" { } ``` +With IAM Conditions: + +```hcl +data "google_iam_policy" "admin" { + binding { + role = "roles/editor" + + members = [ + "user:jane@example.com", + ] + + condition { + title = "My Role" + description = "Grant permissions on my_role" + expression = "(resource.type == \"spanner.googleapis.com/DatabaseRole\" && (resource.name.endsWith(\"/myrole\")))" + } + } +} + +resource "google_spanner_database_iam_policy" "database" { + instance = "your-instance-name" + database = "your-database-name" + policy_data = data.google_iam_policy.admin.policy_data +} +``` + ## google\_spanner\_database\_iam\_binding ```hcl @@ -53,6 +79,26 @@ resource "google_spanner_database_iam_binding" "database" { } ``` +With IAM Conditions: + +```hcl +resource 
"google_spanner_database_iam_binding" "database" { + instance = "your-instance-name" + database = "your-database-name" + role = "roles/compute.networkUser" + + members = [ + "user:jane@example.com", + ] + + condition { + title = "My Role" + description = "Grant permissions on my_role" + expression = "(resource.type == \"spanner.googleapis.com/DatabaseRole\" && (resource.name.endsWith(\"/myrole\")))" + } +} +``` + ## google\_spanner\_database\_iam\_member ```hcl @@ -64,6 +110,23 @@ resource "google_spanner_database_iam_member" "database" { } ``` +With IAM Conditions: + +```hcl +resource "google_spanner_database_iam_member" "database" { + instance = "your-instance-name" + database = "your-database-name" + role = "roles/compute.networkUser" + member = "user:jane@example.com" + + condition { + title = "My Role" + description = "Grant permissions on my_role" + expression = "(resource.type == \"spanner.googleapis.com/DatabaseRole\" && (resource.name.endsWith(\"/myrole\")))" + } +} +``` + ## Argument Reference The following arguments are supported: @@ -91,6 +154,23 @@ The following arguments are supported: * `project` - (Optional) The ID of the project in which the resource belongs. If it is not provided, the provider project is used. +* `condition` - (Optional) An [IAM Condition](https://cloud.google.com/iam/docs/conditions-overview) for a given binding. + Structure is [documented below](#nested_condition). + +--- + +The `condition` block supports: + +* `expression` - (Required) Textual representation of an expression in Common Expression Language syntax. + +* `title` - (Required) A title for the expression, i.e. a short string describing its purpose. + +* `description` - (Optional) An optional description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI. + +~> **Warning:** Terraform considers the `role` and condition contents (`title`+`description`+`expression`) as the +identifier for the binding. 
This means that if any part of the condition is changed out-of-band, Terraform will +consider it to be an entirely different resource and will treat it as such. + ## Attributes Reference In addition to the arguments listed above, the following computed attributes are diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.json b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.json index c55d9a2c4ee6..427f48a3e41b 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.json +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.json @@ -1,6 +1,6 @@ [ { - "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test-dataset", + "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test_dataset", "asset_type": "bigquery.googleapis.com/Dataset", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -11,7 +11,7 @@ "data": { "friendlyName": "", "datasetReference": { - "datasetId": "test-dataset" + "datasetId": "test_dataset" }, "labels": { "env": "dev" diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.tf b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.tf index 44bd499f16c0..a72c95ae1fe6 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.tf +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset.tf @@ -28,7 +28,7 @@ provider "google" { } resource "google_bigquery_dataset" "default" { - dataset_id = "test-dataset" + dataset_id = "test_dataset" location = "EU" default_table_expiration_ms = 3600000 diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.json b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.json index 770149249ec3..e76f11d2022e 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.json +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.json @@ -1,6 +1,6 @@ [ { - "name": 
"//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test-dataset", + "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test_dataset", "asset_type": "bigquery.googleapis.com/Dataset", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -10,7 +10,7 @@ "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", "data": { "datasetReference": { - "datasetId": "test-dataset" + "datasetId": "test_dataset" }, "defaultTableExpirationMs": 3600000, "labels": { diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.tf b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.tf index f8330b074568..12e8b8626361 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.tf +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_binding.tf @@ -28,7 +28,7 @@ provider "google" { } resource "google_bigquery_dataset" "example_dataset" { - dataset_id = "test-dataset" + dataset_id = "test_dataset" location = "EU" project = "{{.Provider.project}}" default_table_expiration_ms = 3600000 diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.json b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.json index 7aed9ac6ce97..c0b0e3053ef5 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.json +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.json @@ -1,6 +1,6 @@ [ { - "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test-dataset", + "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test_dataset", "asset_type": "bigquery.googleapis.com/Dataset", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -10,7 +10,7 @@ "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", "data": { "datasetReference": { - "datasetId": "test-dataset" + 
"datasetId": "test_dataset" }, "defaultTableExpirationMs": 3600000, "labels": { diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.tf b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.tf index f7d87317adeb..337e4d1b7988 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.tf +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_member.tf @@ -28,7 +28,7 @@ provider "google" { } resource "google_bigquery_dataset" "example_dataset" { - dataset_id = "test-dataset" + dataset_id = "test_dataset" location = "EU" project = "{{.Provider.project}}" default_table_expiration_ms = 3600000 diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.json b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.json index 5908f8d7600c..f00a09b317b4 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.json +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.json @@ -1,6 +1,6 @@ [ { - "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test-dataset", + "name": "//bigquery.googleapis.com/projects/{{.Provider.project}}/datasets/test_dataset", "asset_type": "bigquery.googleapis.com/Dataset", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -10,7 +10,7 @@ "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", "data": { "datasetReference": { - "datasetId": "test-dataset" + "datasetId": "test_dataset" }, "defaultTableExpirationMs": 3600000, "labels": { diff --git a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.tf b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.tf index ae6554fe8d28..89a2450aecb2 100644 --- a/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.tf +++ b/mmv1/third_party/tgc/tests/data/example_bigquery_dataset_iam_policy.tf @@ -28,7 +28,7 @@ provider "google" { } 
resource "google_bigquery_dataset" "example-dataset" { - dataset_id = "test-dataset" + dataset_id = "test_dataset" location = "EU" project = "{{.Provider.project}}" default_table_expiration_ms = 3600000 diff --git a/tools/diff-processor/README.md b/tools/diff-processor/README.md index 7cd410dd854a..3bb107cae9d1 100644 --- a/tools/diff-processor/README.md +++ b/tools/diff-processor/README.md @@ -16,7 +16,7 @@ bin/diff-processor breaking-changes # Add labels to a PR based on the resources changed between OLD_REF and NEW_REF # The token used must have write access to issues -GITHUB_TOKEN=github_token bin/diff-processor add-labels PR_ID [--dry-run] +GITHUB_TOKEN_MAGIC_MODULES=github_token bin/diff-processor add-labels PR_ID [--dry-run] ``` ## Test diff --git a/tools/diff-processor/labels/get_issue.go b/tools/diff-processor/labels/get_issue.go index fe9b5a235d8a..c9d756892a48 100644 --- a/tools/diff-processor/labels/get_issue.go +++ b/tools/diff-processor/labels/get_issue.go @@ -20,7 +20,7 @@ func GetIssue(repository string, id uint64) (labeler.Issue, error) { return issue, fmt.Errorf("Error creating request: %w", err) } req.Header.Add("Accept", "application/vnd.github+json") - req.Header.Add("Authorization", "Bearer "+os.Getenv("GITHUB_TOKEN")) + req.Header.Add("Authorization", "Bearer "+os.Getenv("GITHUB_TOKEN_MAGIC_MODULES")) req.Header.Add("X-GitHub-Api-Version", "2022-11-28") resp, err := client.Do(req) if err != nil { diff --git a/tools/diff-processor/rules/rule_test.go b/tools/diff-processor/rules/rule_test.go index c68deed47d2c..2ed61fd00c9a 100644 --- a/tools/diff-processor/rules/rule_test.go +++ b/tools/diff-processor/rules/rule_test.go @@ -23,7 +23,7 @@ func TestUniqueRuleIdentifiers(t *testing.T) { func TestMarkdownIdentifiers(t *testing.T) { // Define the Markdown file path relative to the importer - mdFilePath := "../../../docs/content/develop/breaking-changes.md" + mdFilePath := "../../../docs/content/develop/breaking-changes/breaking-changes.md" // Read 
the Markdown file mdContent, err := ioutil.ReadFile(mdFilePath) diff --git a/tools/go-changelog/README.md b/tools/go-changelog/README.md index 64967a73b685..5dcdaddf096b 100644 --- a/tools/go-changelog/README.md +++ b/tools/go-changelog/README.md @@ -156,8 +156,7 @@ formatting. ## Prior Art -This package is based on a bunch of experiments with the [Google Cloud Platform -Terraform provider](https://github.com/terraform-providers/terraform-provider-google) +This package is based on a bunch of experiments with the [Terraform provider for Google Cloud](https://github.com/terraform-providers/terraform-provider-google) and the lessons learned while generating it. It is also based on prior art in the community: diff --git a/tools/issue-labeler/labeler/enrolled_teams.yml b/tools/issue-labeler/labeler/enrolled_teams.yml index de8934d4019e..f2d88536d0c8 100755 --- a/tools/issue-labeler/labeler/enrolled_teams.yml +++ b/tools/issue-labeler/labeler/enrolled_teams.yml @@ -181,6 +181,7 @@ service/compute-l7-load-balancer: - google_compute_target_https_proxy - google_compute_region_target_https_proxy service/compute-managed: + team: gcp-managed-instance-groups-terraform resources: - google_compute_instance_group_manager.* - google_compute_region_instance_group_manager.* diff --git a/tools/missing-test-detector/detector.go b/tools/missing-test-detector/detector.go index 72de96216d5f..2af06c169781 100644 --- a/tools/missing-test-detector/detector.go +++ b/tools/missing-test-detector/detector.go @@ -5,6 +5,7 @@ import ( "sort" "strings" + "github.com/GoogleCloudPlatform/magic-modules/tools/missing-test-detector/reader" "github.com/hashicorp/hcl/v2/hclwrite" "github.com/zclconf/go-cty/cty" ) @@ -19,7 +20,7 @@ type FieldSet map[string]struct{} // Detect missing tests for the given resource changes map in the given slice of tests. // Return a map of resource names to missing test info about that resource. 
-func detectMissingTests(changedFields map[string]ResourceChanges, allTests []*Test) (map[string]*MissingTestInfo, error) { +func detectMissingTests(changedFields map[string]ResourceChanges, allTests []*reader.Test) (map[string]*MissingTestInfo, error) { resourceNamesToTests := make(map[string][]string) for _, test := range allTests { for _, step := range test.Steps { @@ -51,13 +52,13 @@ func detectMissingTests(changedFields map[string]ResourceChanges, allTests []*Te return missingTests, nil } -func markCoverage(fieldCoverage ResourceChanges, config Resource) error { +func markCoverage(fieldCoverage ResourceChanges, config reader.Resource) error { for fieldName, fieldValue := range config { if coverage, ok := fieldCoverage[fieldName]; ok { if field, ok := coverage.(*Field); ok { field.Tested = true } else if objectCoverage, ok := coverage.(ResourceChanges); ok { - if fieldValueConfig, ok := fieldValue.(Resource); ok { + if fieldValueConfig, ok := fieldValue.(reader.Resource); ok { if err := markCoverage(objectCoverage, fieldValueConfig); err != nil { return fmt.Errorf("error parsing %q: %s", fieldName, err) } diff --git a/tools/missing-test-detector/detector_test.go b/tools/missing-test-detector/detector_test.go index 67855a3d44b7..69b6b1ec7a7c 100644 --- a/tools/missing-test-detector/detector_test.go +++ b/tools/missing-test-detector/detector_test.go @@ -3,10 +3,12 @@ package main import ( "reflect" "testing" + + "github.com/GoogleCloudPlatform/magic-modules/tools/missing-test-detector/reader" ) func TestDetectMissingTests(t *testing.T) { - allTests, errs := readAllTests("testdata") + allTests, errs := reader.ReadAllTests("reader/testdata") if len(errs) > 0 { t.Errorf("errors reading tests before testing detect missing tests: %v", errs) } diff --git a/tools/missing-test-detector/go.mod b/tools/missing-test-detector/go.mod index 3e25a00ea6e2..a074a5d1b471 100644 --- a/tools/missing-test-detector/go.mod +++ b/tools/missing-test-detector/go.mod @@ -1,4 +1,4 @@ 
-module github.com/trodge/magic-modules/tools/missing-test-detector +module github.com/GoogleCloudPlatform/magic-modules/tools/missing-test-detector go 1.20 diff --git a/tools/missing-test-detector/main.go b/tools/missing-test-detector/main.go index 16b6fdeba104..c8ba0672b02a 100644 --- a/tools/missing-test-detector/main.go +++ b/tools/missing-test-detector/main.go @@ -7,6 +7,7 @@ import ( "strings" "text/template" + "github.com/GoogleCloudPlatform/magic-modules/tools/missing-test-detector/reader" "github.com/golang/glog" ) @@ -15,7 +16,7 @@ var flagServicesDir = flag.String("services-dir", "", "directory where service d func main() { flag.Parse() - allTests, errs := readAllTests(*flagServicesDir) + allTests, errs := reader.ReadAllTests(*flagServicesDir) for path, err := range errs { glog.Infof("error reading path: %s, err: %v", path, err) } diff --git a/tools/missing-test-detector/reader.go b/tools/missing-test-detector/reader/reader.go similarity index 98% rename from tools/missing-test-detector/reader.go rename to tools/missing-test-detector/reader/reader.go index 7ef732636da6..0fa96586cb2b 100644 --- a/tools/missing-test-detector/reader.go +++ b/tools/missing-test-detector/reader/reader.go @@ -1,4 +1,4 @@ -package main +package reader import ( "fmt" @@ -33,7 +33,7 @@ func (t *Test) String() string { } // Return a slice of tests as well as a map of file or test names to errors encountered. 
-func readAllTests(servicesDir string) ([]*Test, map[string]error) { +func ReadAllTests(servicesDir string) ([]*Test, map[string]error) { dirs, err := os.ReadDir(servicesDir) if err != nil { return nil, map[string]error{servicesDir: err} @@ -52,7 +52,7 @@ func readAllTests(servicesDir string) ([]*Test, map[string]error) { testFileNames = append(testFileNames, filepath.Join(servicePath, file.Name())) } } - serviceTests, serviceErrs := readTestFiles(testFileNames) + serviceTests, serviceErrs := ReadTestFiles(testFileNames) for fileName, err := range serviceErrs { allErrs[fileName] = err } @@ -65,7 +65,7 @@ func readAllTests(servicesDir string) ([]*Test, map[string]error) { } // Read all the test files in a service directory together to capture cross-file function usage. -func readTestFiles(filenames []string) ([]*Test, map[string]error) { +func ReadTestFiles(filenames []string) ([]*Test, map[string]error) { funcDecls := make(map[string]*ast.FuncDecl) // map of function names to function declarations varDecls := make(map[string]*ast.BasicLit) // map of variable names to value expressions errs := make(map[string]error) // map of file or test names to errors encountered parsing diff --git a/tools/missing-test-detector/reader_test.go b/tools/missing-test-detector/reader/reader_test.go similarity index 92% rename from tools/missing-test-detector/reader_test.go rename to tools/missing-test-detector/reader/reader_test.go index b210c469a502..59a04c03c606 100644 --- a/tools/missing-test-detector/reader_test.go +++ b/tools/missing-test-detector/reader/reader_test.go @@ -1,4 +1,4 @@ -package main +package reader import ( "os" @@ -9,7 +9,7 @@ import ( // This test only ensures there isn't a panic reading tests in the provider. 
func TestReadAllTests(t *testing.T) { if servicesDir := os.Getenv("SERVICES_DIR"); servicesDir != "" { - _, errs := readAllTests(servicesDir) + _, errs := ReadAllTests(servicesDir) for path, err := range errs { t.Logf("path: %s, err: %v", path, err) } @@ -19,7 +19,7 @@ func TestReadAllTests(t *testing.T) { } func TestReadCoveredResourceTestFile(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/covered_resource_test.go"}) + tests, err := ReadTestFiles([]string{"testdata/service/covered_resource_test.go"}) if err != nil { t.Fatalf("error reading covered resource test file: %v", err) } @@ -47,7 +47,7 @@ func TestReadCoveredResourceTestFile(t *testing.T) { } func TestReadConfigVariableTestFile(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/config_variable_test.go"}) + tests, err := ReadTestFiles([]string{"testdata/service/config_variable_test.go"}) if err != nil { t.Fatalf("error reading config variable test file: %v", err) } @@ -67,7 +67,7 @@ func TestReadConfigVariableTestFile(t *testing.T) { } func TestReadMultipleResourcesTestFile(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/multiple_resource_test.go"}) + tests, err := ReadTestFiles([]string{"testdata/service/multiple_resource_test.go"}) if err != nil { t.Fatalf("error reading multiple resources test file: %v", err) } @@ -101,7 +101,7 @@ func TestReadMultipleResourcesTestFile(t *testing.T) { } func TestReadSerialResourceTestFile(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/serial_resource_test.go"}) + tests, err := ReadTestFiles([]string{"testdata/service/serial_resource_test.go"}) if err != nil { t.Fatalf("error reading serial resource test file: %v", err) } @@ -140,7 +140,7 @@ func TestReadSerialResourceTestFile(t *testing.T) { } func TestReadCrossFileTests(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/cross_file_1_test.go", "testdata/service/cross_file_2_test.go"}) + tests, err := 
ReadTestFiles([]string{"testdata/service/cross_file_1_test.go", "testdata/service/cross_file_2_test.go"}) if err != nil { t.Fatalf("error reading cross file tests: %v", err) } @@ -183,7 +183,7 @@ func TestReadCrossFileTests(t *testing.T) { } func TestReadHelperFunctionCall(t *testing.T) { - tests, err := readTestFiles([]string{"testdata/service/function_call_test.go"}) + tests, err := ReadTestFiles([]string{"testdata/service/function_call_test.go"}) if err != nil { t.Fatalf("error reading function call test: %v", err) } @@ -193,7 +193,7 @@ func TestReadHelperFunctionCall(t *testing.T) { expectedTest := &Test{ Name: "TestAccFunctionCallResource", Steps: []Step{ - Step{ + { "helped_resource": Resources{ "primary": Resource{ "field_one": "\"value-one\"", diff --git a/tools/missing-test-detector/testdata/service/config_variable_test.go b/tools/missing-test-detector/reader/testdata/service/config_variable_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/config_variable_test.go rename to tools/missing-test-detector/reader/testdata/service/config_variable_test.go diff --git a/tools/missing-test-detector/testdata/service/covered_resource_test.go b/tools/missing-test-detector/reader/testdata/service/covered_resource_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/covered_resource_test.go rename to tools/missing-test-detector/reader/testdata/service/covered_resource_test.go diff --git a/tools/missing-test-detector/testdata/service/cross_file_1_test.go b/tools/missing-test-detector/reader/testdata/service/cross_file_1_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/cross_file_1_test.go rename to tools/missing-test-detector/reader/testdata/service/cross_file_1_test.go diff --git a/tools/missing-test-detector/testdata/service/cross_file_2_test.go b/tools/missing-test-detector/reader/testdata/service/cross_file_2_test.go similarity index 100% rename from 
tools/missing-test-detector/testdata/service/cross_file_2_test.go rename to tools/missing-test-detector/reader/testdata/service/cross_file_2_test.go diff --git a/tools/missing-test-detector/testdata/service/function_call_test.go b/tools/missing-test-detector/reader/testdata/service/function_call_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/function_call_test.go rename to tools/missing-test-detector/reader/testdata/service/function_call_test.go diff --git a/tools/missing-test-detector/testdata/service/multiple_resource_test.go b/tools/missing-test-detector/reader/testdata/service/multiple_resource_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/multiple_resource_test.go rename to tools/missing-test-detector/reader/testdata/service/multiple_resource_test.go diff --git a/tools/missing-test-detector/testdata/service/serial_resource_test.go b/tools/missing-test-detector/reader/testdata/service/serial_resource_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/serial_resource_test.go rename to tools/missing-test-detector/reader/testdata/service/serial_resource_test.go diff --git a/tools/missing-test-detector/testdata/service/uncovered_resource_test.go b/tools/missing-test-detector/reader/testdata/service/uncovered_resource_test.go similarity index 100% rename from tools/missing-test-detector/testdata/service/uncovered_resource_test.go rename to tools/missing-test-detector/reader/testdata/service/uncovered_resource_test.go diff --git a/tools/teamcity-diff-check/main.go b/tools/teamcity-diff-check/main.go new file mode 100644 index 000000000000..577bdda526ca --- /dev/null +++ b/tools/teamcity-diff-check/main.go @@ -0,0 +1,93 @@ +package main + +import ( + "bufio" + "flag" + "fmt" + "io" + "os" + "regexp" +) + +var serviceFile = flag.String("service_file", "services_ga", "kotlin service file to be parsed") + +func serviceDifference(gS, tS []string) []string { 
+ t := make(map[string]struct{}, len(tS)) + for _, s := range tS { + t[s] = struct{}{} + } + + var diff []string + for _, s := range gS { + if _, found := t[s]; !found { + diff = append(diff, s) + } + } + + return diff +} + +func main() { + flag.Parse() + + file, err := os.Open(*serviceFile + ".txt") + if err != nil { + fmt.Println(err) + return + } + defer file.Close() + + googleServices := []string{} + scanner := bufio.NewScanner(file) + for scanner.Scan() { + googleServices = append(googleServices, scanner.Text()) + } + + //////////////////////////////////////////////////////////////////////////////// + + filePath := fmt.Sprintf("mmv1/third_party/terraform/.teamcity/components/inputs/%s.kt", *serviceFile) + f, err := os.Open(fmt.Sprintf("../../%s", filePath)) // Need to make path relative to where the script is called + if err != nil { + panic(err) + } + + // Get the file size + stat, err := f.Stat() + if err != nil { + fmt.Println(err) + return + } + + // Read the file into a byte slice + bs := make([]byte, stat.Size()) + _, err = bufio.NewReader(f).Read(bs) + if err != nil && err != io.EOF { + fmt.Println(err) + return + } + + // Regex pattern captures "services" from *serviceFile. + pattern := regexp.MustCompile(`(?m)"(?P<service>\w+)"\sto\s+mapOf`) + + template := []byte("$service") + + dst := []byte{} + teamcityServices := []string{} + + // For each match of the regex in the content.
+ for _, submatches := range pattern.FindAllSubmatchIndex(bs, -1) { + service := pattern.Expand(dst, template, bs, submatches) + teamcityServices = append(teamcityServices, string(service)) + } + if len(teamcityServices) == 0 { + fmt.Fprintf(os.Stderr, "error: script could not find any services listed in the file %s.kt .\n", filePath) + os.Exit(1) + } + + if diff := serviceDifference(googleServices, teamcityServices); len(diff) != 0 { + fmt.Fprintf(os.Stderr, "error: missing services detected in %s\n", filePath) + fmt.Fprintf(os.Stderr, "Please update file to include these new services: %s\n", diff) + os.Exit(1) + } + +} diff --git a/tpgtools/README.md b/tpgtools/README.md index 3687768627a1..8ae74dddbfd9 100644 --- a/tpgtools/README.md +++ b/tpgtools/README.md @@ -1,7 +1,7 @@ # tpgtools `tpgtools` is the generator responsible for creating DCL-based resources in the -Terraform Google Provider (TPG). The DCL provides +Terraform provider for Google Cloud (TPG). The DCL provides [OpenAPI schema objects](https://swagger.io/specification/#schema-object) to describe the available types, and `tpgtools` uses those to construct Terraform resource schemas. @@ -55,7 +55,7 @@ go run . --path "api" --overrides "overrides" --output ~/some/dir --mode "serial ## New Resource Guide This guide is written to document the process for adding a resource to the -Google Terraform Provider (TPG) after it has been added to the +Terraform provider for Google Cloud (TPG) after it has been added to the [DCL](https://github.com/GoogleCloudPlatform/declarative-resource-client-library). 
### Adding Resource Overrides diff --git a/tpgtools/go.mod b/tpgtools/go.mod index 14c13f11e191..90f55b270e1e 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.20 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.62.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 86a5ac602ab9..30d683a7cf40 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,12 +6,8 @@ cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.60.0 h1:RFZs9I3tXewC7cJf8RKbUMpQZO6jWZ9SHSnNd+auxsQ= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.60.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.61.0 h1:IAr9UlYbxURIYABRMagXXo8pDlkFNFFXWz5J2+srrnc= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.61.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.62.0 h1:s4Y6r6RrYLBnqosGXLwR0h1Gqr0VT3wgd6rqvHsD9OE= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.62.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0 h1:eSOBYPZVnU2fZul9sAJFGLVCgv6stNVKkmsogKF7UeY= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.63.0/go.mod 
h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= diff --git a/tpgtools/override.go b/tpgtools/override.go index a190f8026e7e..5ce55962733c 100644 --- a/tpgtools/override.go +++ b/tpgtools/override.go @@ -79,6 +79,7 @@ const ( CustomListSize = "CUSTOM_LIST_SIZE_CONSTRAINT" CustomDefault = "CUSTOM_DEFAULT" CustomSchemaValues = "CUSTOM_SCHEMA_VALUES" + ComplexMapKey = "COMPLEX_MAP_KEY_NAME" ) // Overrides represents the type a resource's override file can be marshalled diff --git a/tpgtools/override_details.go b/tpgtools/override_details.go index 12ee16832a59..963c23656e92 100644 --- a/tpgtools/override_details.go +++ b/tpgtools/override_details.go @@ -230,3 +230,8 @@ type StateUpgradeDetails struct { // The current schema version SchemaVersion int } + +type ComplexMapKeyDetails struct { + // The name of the key as exposed by Terraform + KeyName string +} diff --git a/tpgtools/overrides/gkehub/beta/feature_membership.yaml b/tpgtools/overrides/gkehub/beta/feature_membership.yaml index 3eac7364e119..8acee09e053a 100644 --- a/tpgtools/overrides/gkehub/beta/feature_membership.yaml +++ b/tpgtools/overrides/gkehub/beta/feature_membership.yaml @@ -29,3 +29,11 @@ details: functions: - tpgresource.DefaultProviderProject +- type: COMPLEX_MAP_KEY_NAME + field: policycontroller.policy_controller_hub_config.policy_content.bundles + details: + keyname: bundle_name +- type: COMPLEX_MAP_KEY_NAME + field: policycontroller.policy_controller_hub_config.deployment_configs + details: + keyname: component_name \ No newline at end of file diff --git a/tpgtools/overrides/gkehub/feature_membership.yaml b/tpgtools/overrides/gkehub/feature_membership.yaml index 2e8b8f32503c..cd6cf1876fcc 
100644 --- a/tpgtools/overrides/gkehub/feature_membership.yaml +++ b/tpgtools/overrides/gkehub/feature_membership.yaml @@ -24,4 +24,12 @@ field: mesh.control_plane details: message: >- - Deprecated in favor of the `management` field \ No newline at end of file + Deprecated in favor of the `management` field +- type: COMPLEX_MAP_KEY_NAME + field: policycontroller.policy_controller_hub_config.policy_content.bundles + details: + keyname: bundle_name +- type: COMPLEX_MAP_KEY_NAME + field: policycontroller.policy_controller_hub_config.deployment_configs + details: + keyname: component_name \ No newline at end of file diff --git a/tpgtools/overrides/networkconnectivity/beta/hub.yaml b/tpgtools/overrides/networkconnectivity/beta/hub.yaml index af851d79a0c9..282feed97e5f 100644 --- a/tpgtools/overrides/networkconnectivity/beta/hub.yaml +++ b/tpgtools/overrides/networkconnectivity/beta/hub.yaml @@ -1,3 +1,4 @@ +- type: NO_SWEEPER - type: CUSTOMIZE_DIFF details: functions: diff --git a/tpgtools/overrides/networkconnectivity/beta/spoke.yaml b/tpgtools/overrides/networkconnectivity/beta/spoke.yaml index af851d79a0c9..282feed97e5f 100644 --- a/tpgtools/overrides/networkconnectivity/beta/spoke.yaml +++ b/tpgtools/overrides/networkconnectivity/beta/spoke.yaml @@ -1,3 +1,4 @@ +- type: NO_SWEEPER - type: CUSTOMIZE_DIFF details: functions: diff --git a/tpgtools/property.go b/tpgtools/property.go index f7939be2a3d9..62e5aa4fa51c 100644 --- a/tpgtools/property.go +++ b/tpgtools/property.go @@ -107,6 +107,10 @@ type Property struct { // Sub-properties of nested objects or arrays with nested objects Properties []Property + // If this is a complex map type, this string represents the name of the + // field that the key to the map can be set with + ComplexMapKeyName string + // Reference to the parent resource. // note: "Properties" will not be available. 
resource *Resource @@ -198,13 +202,24 @@ func (p Property) ObjectType() string { } func (p Property) IsArray() bool { - return (p.Type.String() == SchemaTypeList || p.Type.String() == SchemaTypeSet) && !p.Type.IsObject() + return (p.Type.String() == SchemaTypeList || p.Type.String() == SchemaTypeSet) && !p.Type.IsObject() && !p.IsComplexMap() } func (t Type) IsSet() bool { return t.String() == SchemaTypeSet } +// Complex map is for maps of string --> object that are supported in DCL but +// not in Terraform. We handle this by adding a field in the Terraform schema +// for the key in the map. This must be added via a COMPLEX_MAP_KEY_NAME +// override +func (t Type) IsComplexMap() bool { + if t.typ.AdditionalProperties != nil { + return t.typ.AdditionalProperties.Type != "string" + } + return false +} + // ShouldGenerateNestedSchema returns true if an object's nested schema function should be generated. func (p Property) ShouldGenerateNestedSchema() bool { return len(p.Properties) > 0 && !p.Collapsed @@ -278,6 +293,9 @@ func buildGetter(p Property, rawGetter string) string { if p.Type.IsEnumArray() { return fmt.Sprintf("expand%s%sArray(%s)", p.resource.PathType(), p.PackagePath(), rawGetter) } + if p.Type.IsComplexMap() { + return fmt.Sprintf("expand%s%sMap(%s)", p.resource.PathType(), p.PackagePath(), rawGetter) + } if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "string" { return fmt.Sprintf("tpgdclresource.ExpandStringArray(%s)", rawGetter) } @@ -317,6 +335,9 @@ func (p Property) DefaultStateSetter() string { return fmt.Sprintf("d.Set(%q, res.%s)", p.Name(), p.PackageName) case SchemaTypeList, SchemaTypeSet: + if p.IsComplexMap() { + return fmt.Sprintf("d.Set(%q, flatten%s%sMap(res.%s))", p.Name(), p.resource.PathType(), p.PackagePath(), p.PackageName) + } if p.typ.Items != nil && ((p.typ.Items.Type == "string" && len(p.typ.Items.Enum) == 0) || p.typ.Items.Type == "integer") { return fmt.Sprintf("d.Set(%q, res.%s)", p.Name(), p.PackageName) } @@ -365,6 
+386,9 @@ func (p Property) flattenGetterWithParent(parent string) string { if p.Type.IsEnumArray() { return fmt.Sprintf("flatten%s%sArray(obj.%s)", p.resource.PathType(), p.PackagePath(), p.PackageName) } + if p.Type.IsComplexMap() { + return fmt.Sprintf("flatten%s%sMap(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName) + } if p.Type.typ.Items != nil && p.Type.typ.Items.Type == "integer" { return fmt.Sprintf("%s.%s", parent, p.PackageName) } @@ -376,7 +400,6 @@ func (p Property) flattenGetterWithParent(parent string) string { return fmt.Sprintf("flatten%s%sArray(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName) } } - if p.typ.Type == "object" { return fmt.Sprintf("flatten%s%s(%s.%s)", p.resource.PathType(), p.PackagePath(), parent, p.PackageName) } @@ -651,6 +674,38 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher p.ElemIsBasicType = true } } + // Complex maps are represented as TypeSet but don't have v.Items set. + // Use AdditionalProperties instead, and add an additional `name` field + // that represents the key in the map + if p.Type.IsComplexMap() { + props, err := createPropertiesFromSchema(p.Type.typ.AdditionalProperties, typeFetcher, overrides, resource, &p, location) + if err != nil { + return nil, err + } + cm := ComplexMapKeyDetails{} + cmOk, err := overrides.PropertyOverrideWithDetails(ComplexMapKey, p, &cm, location) + if err != nil { + return nil, fmt.Errorf("failed to decode complex map key name details") + } + if !cmOk { + return nil, fmt.Errorf("failed to find complex map key name for map named: %s", p.Name()) + } + keyProp := Property{ + title: cm.KeyName, + Type: Type{&openapi.Schema{Type: "string"}}, + resource: resource, + parent: &p, + Required: true, + Description: "The name for the key in the map for which this object is mapped to in the API", + } + props = append([]Property{keyProp}, props...) 
+ + p.Properties = props + e := fmt.Sprintf("%s%sSchema()", resource.PathType(), p.PackagePath()) + p.Elem = &e + p.ElemIsBasicType = false + p.ComplexMapKeyName = cm.KeyName + } if !p.Computed { if stringInSlice(v.Title, schema.Required) { @@ -779,7 +834,7 @@ func createPropertiesFromSchema(schema *openapi.Schema, typeFetcher *TypeFetcher p.ValidateFunc = &vf.Function } - if p.Type.String() == SchemaTypeSet { + if p.Type.IsSet() { shf := SetHashFuncDetails{} shfOk, err := overrides.PropertyOverrideWithDetails(SetHashFunc, p, &shf, location) if err != nil { diff --git a/tpgtools/templates/resource.go.tmpl b/tpgtools/templates/resource.go.tmpl index 8de8fa398c60..b65085ba771f 100644 --- a/tpgtools/templates/resource.go.tmpl +++ b/tpgtools/templates/resource.go.tmpl @@ -661,6 +661,39 @@ func expand{{$.PathType}}{{$v.PackagePath}}Array(o interface{}) []{{$.Package}}. items = append(items, *i) } + return items +} + {{- end }} + + {{ if $v.IsComplexMap -}} +func expand{{$.PathType}}{{$v.PackagePath}}Map(o interface{}) map[string]{{$.Package}}.{{$v.ObjectType}} { + if o == nil { + {{- if $v.Computed }} + return nil + {{- else }} + return make(map[string]{{$.Package}}.{{$v.ObjectType}}) + {{- end }} + } + + o = o.(*schema.Set).List() + + objs := o.([]interface{}) + if len(objs) == 0 || objs[0] == nil { + {{- if $v.Computed }} + return nil + {{- else }} + return make(map[string]{{$.Package}}.{{$v.ObjectType}}) + {{- end }} + } + + items := make(map[string]{{$.Package}}.{{$v.ObjectType}}) + for _, item := range objs { + i := expand{{$.PathType}}{{$v.PackagePath}}(item) + if item != nil { + items[item.(map[string]interface{})["{{$v.ComplexMapKeyName}}"].(string)] = *i + } + } + return items } {{- end }} @@ -688,7 +721,7 @@ func expand{{$.PathType}}{{$v.PackagePath}}(o interface{}) *{{$.Package}}.{{$v.O {{- end }} return &{{$.Package}}.{{$v.ObjectType}}{ {{- range $p := $v.Properties }} - {{- if and ($p.Settable) ($p.ExpandGetter) }} + {{- if and ($p.Settable) 
($p.ExpandGetter) (or (not $v.IsComplexMap) (ne $p.Name $v.ComplexMapKeyName)) }} {{$p.PackageName}}: {{$p.ExpandGetter}}, {{- end -}} {{ end }} @@ -713,17 +746,36 @@ func flatten{{$.PathType}}{{$v.PackagePath}}Array(objs []{{$.Package}}.{{$v.Obje } {{- end }} -func flatten{{$.PathType}}{{$v.PackagePath}}(obj *{{$.Package}}.{{$v.ObjectType}}) interface{} { - if obj == nil || obj.Empty(){ + {{ if $v.IsComplexMap -}} +func flatten{{$.PathType}}{{$v.PackagePath}}Map(objs map[string]{{$.Package}}.{{$v.ObjectType}}) []interface{} { + if objs == nil { + return nil + } + + items := []interface{}{} + for name, item := range objs { + i := flatten{{$.PathType}}{{$v.PackagePath}}(&item, name) + items = append(items, i) + } + + return items +} + {{- end }} + +func flatten{{$.PathType}}{{$v.PackagePath}}(obj *{{$.Package}}.{{$v.ObjectType}}{{- if $v.IsComplexMap -}}, name string{{- end -}}) interface{} { + if obj == nil {{- if not $v.IsComplexMap -}}|| obj.Empty(){{- end -}}{ return nil } transformed := map[string]interface{}{ {{- range $p := $v.Properties }} - {{- if ($p.FlattenGetter) }} + {{- if or (not $v.IsComplexMap) (ne $p.Name $v.ComplexMapKeyName) }} "{{$p.Name}}": {{$p.FlattenGetter}}, {{- end -}} {{ end }} } +{{ if $v.IsComplexMap }} + transformed["{{$v.ComplexMapKeyName}}"] = name +{{ end }} {{ if $v.IsObject }} return []interface{}{transformed} {{ else }} diff --git a/tpgtools/type.go b/tpgtools/type.go index 47adedaa30c9..cb31f0a7c562 100644 --- a/tpgtools/type.go +++ b/tpgtools/type.go @@ -67,14 +67,13 @@ func (t Type) String() string { } return "unknown number type" case "object": - // assume if this is set, it's a string -> string map for now. 
- // https://swagger.io/docs/specification/data-models/dictionaries/ - // describes the behaviour of AdditionalProperties for type: object if t.typ.AdditionalProperties != nil { if v := t.typ.AdditionalProperties.Type; v == "string" { return SchemaTypeMap } else { - return fmt.Sprintf("unknown AdditionalProperties: %q", v) + // Complex maps are handled as sets with an extra value for the + // name of the object + return SchemaTypeSet } } return SchemaTypeList