diff --git a/.ci/magician/cmd/test_terraform_vcr.go b/.ci/magician/cmd/test_terraform_vcr.go index 9bc4d850e996..c9d9c8df526d 100644 --- a/.ci/magician/cmd/test_terraform_vcr.go +++ b/.ci/magician/cmd/test_terraform_vcr.go @@ -7,6 +7,7 @@ import ( "regexp" "sort" "strings" + "text/template" "github.com/spf13/cobra" @@ -15,6 +16,21 @@ import ( "magician/provider" "magician/source" "magician/vcr" + + _ "embed" +) + +var ( + //go:embed test_terraform_vcr_test_analytics.tmpl + testsAnalyticsTmplText string + //go:embed test_terraform_vcr_non_exercised_tests.tmpl + nonExercisedTestsTmplText string + //go:embed test_terraform_vcr_with_replay_failed_tests.tmpl + withReplayFailedTestsTmplText string + //go:embed test_terraform_vcr_without_replay_failed_tests.tmpl + withoutReplayFailedTestsTmplText string + //go:embed test_terraform_vcr_record_replay.tmpl + recordReplayTmplText string ) var ttvEnvironmentVariables = [...]string{ @@ -40,6 +56,37 @@ var ttvEnvironmentVariables = [...]string{ "USER", } +type analytics struct { + ReplayingResult *vcr.Result + RunFullVCR bool + AffectedServices []string +} + +type nonExercisedTests struct { + NotRunBetaTests []string + NotRunGATests []string +} + +type withReplayFailedTests struct { + ReplayingResult *vcr.Result +} + +type withoutReplayFailedTests struct { + ReplayingErr error + PRNumber string + BuildID string +} + +type recordReplay struct { + RecordingResult *vcr.Result + ReplayingAfterRecordingResult *vcr.Result + HasTerminatedTests bool + RecordingErr error + AllRecordingPassed bool + PRNumber string + BuildID string +} + var testTerraformVCRCmd = &cobra.Command{ Use: "test-terraform-vcr", Short: "Run vcr tests for affected packages", @@ -143,7 +190,7 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, return fmt.Errorf("error posting pending status: %w", err) } - replayingResult, affectedServicesComment, testDirs, replayingErr := runReplaying(runFullVCR, services, vt) + replayingResult, 
testDirs, replayingErr := runReplaying(runFullVCR, services, vt) testState := "success" if replayingErr != nil { testState = "failure" @@ -159,55 +206,41 @@ func execTestTerraformVCR(prNumber, mmCommitSha, buildID, projectID, buildStep, return nil } - failedTestsPattern := strings.Join(replayingResult.FailedTests, "|") - - comment := `#### Tests analytics -Total tests: ` + fmt.Sprintf("`%d`", len(replayingResult.PassedTests)+len(replayingResult.SkippedTests)+len(replayingResult.FailedTests)) + ` -Passed tests: ` + fmt.Sprintf("`%d`", len(replayingResult.PassedTests)) + ` -Skipped tests: ` + fmt.Sprintf("`%d`", len(replayingResult.SkippedTests)) + ` -Affected tests: ` + fmt.Sprintf("`%d`", len(replayingResult.FailedTests)) + ` - -
Click here to see the affected service packages
` + affectedServicesComment + `
` + var servicesArr []string + for s := range services { + servicesArr = append(servicesArr, s) + } + analyticsData := analytics{ + ReplayingResult: replayingResult, + RunFullVCR: runFullVCR, + AffectedServices: sort.StringSlice(servicesArr), + } + testsAnalyticsComment, err := formatTestsAnalytics(analyticsData) + if err != nil { + return fmt.Errorf("error formatting test_analytics comment: %w", err) + } notRunBeta, notRunGa := notRunTests(tpgRepo.UnifiedZeroDiff, tpgbRepo.UnifiedZeroDiff, replayingResult) - if len(notRunBeta) > 0 || len(notRunGa) > 0 { - comment += ` - -#### Non-exercised tests` - - if len(notRunBeta) > 0 { - comment += ` - -Tests were added that are skipped in VCR: -` - for _, t := range notRunBeta { - comment += ` -- ` + t - } - } - - if len(notRunGa) > 0 { - comment += ` - -Tests were added that are GA-only additions and require manual runs: -` - for _, t := range notRunGa { - comment += ` -- ` + t - } - } + nonExercisedTestsData := nonExercisedTests{ + NotRunBetaTests: notRunBeta, + NotRunGATests: notRunGa, + } + nonExercisedTestsComment, err := formatNonExercisedTests(nonExercisedTestsData) + if err != nil { + return fmt.Errorf("error formatting non exercised tests comment: %w", err) } if len(replayingResult.FailedTests) > 0 { - comment += fmt.Sprintf(` - - -#### Action taken -
Found %d affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests
%s
- -[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/docs/getting-started/contributing/#general-contributing-steps)`, len(replayingResult.FailedTests), failedTestsPattern) + withReplayFailedTestsData := withReplayFailedTests{ + ReplayingResult: replayingResult, + } + withReplayFailedTestsComment, err := formatWithReplayFailedTests(withReplayFailedTestsData) + if err != nil { + return fmt.Errorf("error formatting action taken comment: %w", err) + } + comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withReplayFailedTestsComment}, "\n") if err := gh.PostComment(prNumber, comment); err != nil { return fmt.Errorf("error posting comment: %w", err) } @@ -233,15 +266,10 @@ Tests were added that are GA-only additions and require manual runs: return nil } - comment = "" + var replayingAfterRecordingResult *vcr.Result + var replayingAfterRecordingErr error if len(recordingResult.PassedTests) > 0 { - comment += "$\\textcolor{green}{\\textsf{Tests passed during RECORDING mode:}}$\n" - for _, passedTest := range recordingResult.PassedTests { - comment += fmt.Sprintf("`%s`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/recording/%s.log)]\n", passedTest, prNumber, buildID, passedTest) - } - comment += "\n\n" - - replayingAfterRecordingResult, replayingAfterRecordingErr := vt.RunParallel(vcr.Replaying, provider.Beta, testDirs, recordingResult.PassedTests) + replayingAfterRecordingResult, replayingAfterRecordingErr = vt.RunParallel(vcr.Replaying, provider.Beta, testDirs, recordingResult.PassedTests) if replayingAfterRecordingErr != nil { testState = "failure" } @@ -249,61 +277,43 @@ Tests were added that are GA-only additions and require manual runs: if err := vt.UploadLogs("ci-vcr-logs", prNumber, buildID, true, true, vcr.Replaying, provider.Beta); err != nil { return fmt.Errorf("error uploading recording logs: %w", err) } - - if 
len(replayingAfterRecordingResult.FailedTests) > 0 { - comment += "$\\textcolor{red}{\\textsf{Tests failed when rerunning REPLAYING mode:}}$\n" - for _, failedTest := range replayingAfterRecordingResult.FailedTests { - comment += fmt.Sprintf("`%s`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/build-log/replaying_build_after_recording/%s_replaying_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/replaying_after_recording/%s.log)]\n", failedTest, prNumber, buildID, failedTest, prNumber, buildID, failedTest) - } - comment += "\n\n" - comment += `Tests failed due to non-determinism or randomness when the VCR replayed the response after the HTTP request was made. - -Please fix these to complete your PR. If you believe these test failures to be incorrect or unrelated to your change, or if you have any questions, please raise the concern with your reviewer. -` - } else { - comment += "$\\textcolor{green}{\\textsf{No issues found for passed tests after REPLAYING rerun.}}$\n" - } - comment += "\n---\n" - } - if len(recordingResult.FailedTests) > 0 { - comment += "$\\textcolor{red}{\\textsf{Tests failed during RECORDING mode:}}$\n" - for _, failedTest := range recordingResult.FailedTests { - comment += fmt.Sprintf("`%s`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/build-log/recording_build/%s_recording_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/recording/%s.log)]\n", failedTest, prNumber, buildID, failedTest, prNumber, buildID, failedTest) - } - comment += "\n\n" - if len(recordingResult.PassedTests)+len(recordingResult.FailedTests) < len(replayingResult.FailedTests) { - comment += "$\\textcolor{red}{\\textsf{Several tests got terminated during RECORDING mode.}}$\n" - } - comment += "$\\textcolor{red}{\\textsf{Please fix these to complete 
your PR.}}$\n" - } else { - if len(recordingResult.PassedTests)+len(recordingResult.FailedTests) < len(replayingResult.FailedTests) { - comment += "$\\textcolor{red}{\\textsf{Several tests got terminated during RECORDING mode.}}$\n" - } else if recordingErr != nil { - // Check for any uncaught errors in RECORDING mode. - comment += "$\\textcolor{red}{\\textsf{Errors occurred during RECORDING mode. Please fix them to complete your PR.}}$\n" - } else { - comment += "$\\textcolor{green}{\\textsf{All tests passed!}}$\n" - } + hasTerminatedTests := (len(recordingResult.PassedTests) + len(recordingResult.FailedTests)) < len(replayingResult.FailedTests) + allRecordingPassed := len(recordingResult.FailedTests) == 0 && !hasTerminatedTests && recordingErr == nil + + recordReplayData := recordReplay{ + RecordingResult: recordingResult, + ReplayingAfterRecordingResult: replayingAfterRecordingResult, + RecordingErr: recordingErr, + HasTerminatedTests: hasTerminatedTests, + AllRecordingPassed: allRecordingPassed, + PRNumber: prNumber, + BuildID: buildID, + } + recordReplayComment, err := formatRecordReplay(recordReplayData) + if err != nil { + return fmt.Errorf("error formatting record replay comment: %w", err) + } + if err := gh.PostComment(prNumber, recordReplayComment); err != nil { + return fmt.Errorf("error posting comment: %w", err) } - comment += fmt.Sprintf("View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/build-log/recording_test.log) or the [debug log](https://console.cloud.google.com/storage/browser/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/recording) for each test", prNumber, buildID, prNumber, buildID) - } else { - // Add newlines so that the color formatting will work properly. 
- comment += ` + } else { // len(replayingResult.FailedTests) == 0 + withoutReplayFailedTestsData := withoutReplayFailedTests{ + ReplayingErr: replayingErr, + PRNumber: prNumber, + BuildID: buildID, + } + withoutReplayFailedTestsComment, err := formatWithoutReplayFailedTests(withoutReplayFailedTestsData) + if err != nil { + return fmt.Errorf("error formatting action taken comment: %w", err) + } -` - if replayingErr != nil { - // Check for any uncaught errors in REPLAYING mode. - comment += "$\\textcolor{red}{\\textsf{Errors occurred during REPLAYING mode. Please fix them to complete your PR.}}$\n" - } else { - comment += "$\\textcolor{green}{\\textsf{All tests passed!}}$\n" + comment := strings.Join([]string{testsAnalyticsComment, nonExercisedTestsComment, withoutReplayFailedTestsComment}, "\n") + if err := gh.PostComment(prNumber, comment); err != nil { + return fmt.Errorf("error posting comment: %w", err) } - comment += fmt.Sprintf("View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-%s/artifacts/%s/build-log/replaying_test.log)", prNumber, buildID) - } - if err := gh.PostComment(prNumber, comment); err != nil { - return fmt.Errorf("error posting comment: %w", err) } if err := gh.PostBuildStatus(prNumber, "VCR-test", testState, buildStatusTargetURL, mmCommitSha); err != nil { @@ -379,17 +389,14 @@ func modifiedPackages(changedFiles []string) (map[string]struct{}, bool) { return services, runFullVCR } -func runReplaying(runFullVCR bool, services map[string]struct{}, vt *vcr.Tester) (*vcr.Result, string, []string, error) { +func runReplaying(runFullVCR bool, services map[string]struct{}, vt *vcr.Tester) (*vcr.Result, []string, error) { var result *vcr.Result - affectedServicesComment := "None" var testDirs []string var replayingErr error if runFullVCR { fmt.Println("run full VCR tests") - affectedServicesComment = "all service packages are affected" result, replayingErr = vt.Run(vcr.Replaying, provider.Beta, nil) } else if 
len(services) > 0 { - affectedServicesComment = "" } - return result, affectedServicesComment, testDirs, replayingErr + return result, testDirs, replayingErr } func handlePanics(prNumber, buildID, buildStatusTargetURL, mmCommitSha string, result *vcr.Result, mode vcr.Mode, gh GithubClient) (bool, error) { @@ -430,3 +435,40 @@ View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/head func init() { rootCmd.AddCommand(testTerraformVCRCmd) } + +func formatComment(fileName string, tmplText string, data any) (string, error) { + funcs := template.FuncMap{ + "join": strings.Join, + "add": func(i, j int) int { return i + j }, + } + tmpl, err := template.New(fileName).Funcs(funcs).Parse(tmplText) + if err != nil { + panic(fmt.Sprintf("Unable to parse %s: %s", fileName, err)) + } + sb := new(strings.Builder) + err = tmpl.Execute(sb, data) + if err != nil { + return "", err + } + return strings.TrimSpace(sb.String()), nil +} + +func formatTestsAnalytics(data analytics) (string, error) { + return formatComment("test_terraform_vcr_test_analytics.tmpl", testsAnalyticsTmplText, data) +} + +func formatNonExercisedTests(data nonExercisedTests) (string, error) { + return formatComment("test_terraform_vcr_recording_mode_results.tmpl", nonExercisedTestsTmplText, data) +} + +func formatWithReplayFailedTests(data withReplayFailedTests) (string, error) { + return formatComment("test_terraform_vcr_with_replay_failed_tests.tmpl", withReplayFailedTestsTmplText, data) +} + +func formatWithoutReplayFailedTests(data withoutReplayFailedTests) (string, error) { + return formatComment("test_terraform_vcr_without_replay_failed_tests.tmpl", withoutReplayFailedTestsTmplText, data) +} + +func formatRecordReplay(data recordReplay) (string, error) { + return formatComment("test_terraform_vcr_record_replay.tmpl", recordReplayTmplText, data) +} diff --git a/.ci/magician/cmd/test_terraform_vcr_non_exercised_tests.tmpl b/.ci/magician/cmd/test_terraform_vcr_non_exercised_tests.tmpl new 
file mode 100644 index 000000000000..b3a6a21c02c5 --- /dev/null +++ b/.ci/magician/cmd/test_terraform_vcr_non_exercised_tests.tmpl @@ -0,0 +1,13 @@ +{{- if or (gt (len .NotRunBetaTests) 0) (gt (len .NotRunGATests) 0) -}} +#### Non-exercised tests + +{{if gt (len .NotRunBetaTests) 0 -}} +Tests were added that are skipped in VCR: +{{range .NotRunBetaTests}}{{. | printf "- %s\n"}}{{end}} +{{end}} + +{{if gt (len .NotRunGATests) 0 -}} +Tests were added that are GA-only additions and require manual runs: +{{range .NotRunGATests}}{{. | printf "- %s\n"}}{{end}} +{{end}} +{{end}} diff --git a/.ci/magician/cmd/test_terraform_vcr_record_replay.tmpl b/.ci/magician/cmd/test_terraform_vcr_record_replay.tmpl new file mode 100644 index 000000000000..d5c7535798f8 --- /dev/null +++ b/.ci/magician/cmd/test_terraform_vcr_record_replay.tmpl @@ -0,0 +1,31 @@ +{{- if gt (len .RecordingResult.PassedTests) 0 -}} +$\textcolor{green}{\textsf{Tests passed during RECORDING mode:}}$ +{{range .RecordingResult.PassedTests}}`{{.}}`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{$.PRNumber}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] +{{end}} + +{{- if gt (len .ReplayingAfterRecordingResult.FailedTests ) 0 -}} +$\textcolor{red}{\textsf{Tests failed when rerunning REPLAYING mode:}}$ +{{range .ReplayingAfterRecordingResult.FailedTests}}`{{.}}`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{$.PRNumber}}/artifacts/{{$.BuildID}}/build-log/replaying_build_after_recording/{{.}}_replaying_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{$.PRNumber}}/artifacts/{{$.BuildID}}/replaying_after_recording/{{.}}.log)] +{{end}} + +Tests failed due to non-determinism or randomness when the VCR replayed the response after the HTTP request was made. + +Please fix these to complete your PR. 
If you believe these test failures to be incorrect or unrelated to your change, or if you have any questions, please raise the concern with your reviewer. + +{{else}} +$\textcolor{green}{\textsf{No issues found for passed tests after REPLAYING rerun.}}$ +{{end}}{{/* end of if gt (len .ReplayingAfterRecordingResult.FailedTests ) 0 */}} +--- +{{end}}{{/* end of if gt (len .RecordingResult.PassedTests) 0 */}} + +{{if gt (len .RecordingResult.FailedTests) 0 -}} +$\textcolor{red}{\textsf{Tests failed during RECORDING mode:}}$ +{{range .RecordingResult.FailedTests}}`{{.}}`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{$.PRNumber}}/artifacts/{{$.BuildID}}/build-log/recording_build/{{.}}_recording_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{$.PRNumber}}/artifacts/{{$.BuildID}}/recording/{{.}}.log)] +{{end}} +{{end}} {{- /* end of if gt (len .RecordingResult.FailedTests) 0 */ -}} + +{{if .HasTerminatedTests}}$\textcolor{red}{\textsf{Several tests got terminated during RECORDING mode.}}${{end}} +{{if .RecordingErr}}$\textcolor{red}{\textsf{Errors occurred during RECORDING mode. 
Please fix them to complete your PR.}}${{end}} +{{if .AllRecordingPassed}}$\textcolor{green}{\textsf{All tests passed!}}${{end}} + +View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{.PRNumber}}/artifacts/{{.BuildID}}/build-log/recording_test.log) or the [debug log](https://console.cloud.google.com/storage/browser/ci-vcr-logs/beta/refs/heads/auto-pr-{{.PRNumber}}/artifacts/{{.BuildID}}/recording) for each test diff --git a/.ci/magician/cmd/test_terraform_vcr_test.go b/.ci/magician/cmd/test_terraform_vcr_test.go index feb140fff4b0..604ec2feb06c 100644 --- a/.ci/magician/cmd/test_terraform_vcr_test.go +++ b/.ci/magician/cmd/test_terraform_vcr_test.go @@ -1,9 +1,12 @@ package cmd import ( + "fmt" "reflect" + "strings" "testing" + "github.com/google/go-cmp/cmp" "github.com/stretchr/testify/assert" "magician/vcr" @@ -213,3 +216,402 @@ func TestNotRunTests(t *testing.T) { }) } } + +func TestAnalyticsComment(t *testing.T) { + tests := []struct { + name string + data analytics + want string + }{ + { + name: "run full vcr is false and no affected services", + data: analytics{ + ReplayingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + FailedTests: []string{"f"}, + }, + RunFullVCR: false, + AffectedServices: []string{}, + }, + want: strings.Join( + []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "None", + "", + "
", + "
", + }, + "\n", + ), + }, + { + name: "run full vcr is false and has affected services", + data: analytics{ + ReplayingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + FailedTests: []string{"f"}, + }, + RunFullVCR: false, + AffectedServices: []string{"svc-a", "svc-b"}, + }, + want: strings.Join( + []string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "", + "", + "
", + "
", + }, + "\n", + ), + }, + { + name: "run full vcr is true", + data: analytics{ + ReplayingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + SkippedTests: []string{"d", "e"}, + FailedTests: []string{"f"}, + }, + RunFullVCR: true, + AffectedServices: []string{}, + }, + want: strings.Join([]string{ + "#### Tests analytics", + "Total tests: 6", + "Passed tests: 3", + "Skipped tests: 2", + "Affected tests: 1", + "", + "
", + "Click here to see the affected service packages", + "
", + "", + "All service packages are affected", + "", + "
", + "
", + }, + "\n", + ), + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatTestsAnalytics(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatTestsAnalytics() returned unexpected difference (-want +got):\n%s", diff) + } + }) + } +} + +func TestNonExercisedTestsComment(t *testing.T) { + tests := []struct { + name string + data nonExercisedTests + want string + }{ + { + name: "without non exercised tests", + data: nonExercisedTests{}, + want: strings.Join( + []string{}, + "\n", + ), + }, + { + name: "with not run beta tests", + data: nonExercisedTests{ + NotRunBetaTests: []string{"beta-1", "beta-2"}, + }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + "Tests were added that are skipped in VCR:", + "- beta-1", + "- beta-2", + }, + "\n", + ), + }, + { + name: "with not run ga tests", + data: nonExercisedTests{ + NotRunGATests: []string{"ga-1", "ga-2"}, + }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + "", + "", + "Tests were added that are GA-only additions and require manual runs:", + "- ga-1", + "- ga-2", + }, + "\n", + ), + }, + { + name: "with not run ga tests and not run beta tests", + data: nonExercisedTests{ + NotRunGATests: []string{"ga-1", "ga-2"}, + NotRunBetaTests: []string{"beta-1", "beta-2"}, + }, + want: strings.Join( + []string{ + "#### Non-exercised tests", + "", + "Tests were added that are skipped in VCR:", + "- beta-1", + "- beta-2", + "", + "", + "", + "Tests were added that are GA-only additions and require manual runs:", + "- ga-1", + "- ga-2", + }, + "\n", + ), + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatNonExercisedTests(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatNonExercisedTests() returned unexpected 
difference (-want +got):\n%s", diff) + } + }) + } +} + +func TestWithReplayFailedTests(t *testing.T) { + tests := []struct { + name string + data withReplayFailedTests + want string + }{ + { + name: "with failed tests", + data: withReplayFailedTests{ + ReplayingResult: &vcr.Result{ + FailedTests: []string{"a", "b"}, + }, + }, + want: strings.Join( + []string{ + "#### Action taken", + "
", + "Found 2 affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests", + "", + "
", + "", + "
", + "
", + "", + "[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/docs/getting-started/contributing/#general-contributing-steps)", + }, + "\n", + ), + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatWithReplayFailedTests(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatWithReplayFailedTests() returned unexpected difference (-want +got):\n%s", diff) + } + }) + } +} + +func TestWithoutReplayFailedTests(t *testing.T) { + tests := []struct { + name string + data withoutReplayFailedTests + want string + }{ + { + name: "with replay error", + data: withoutReplayFailedTests{ + ReplayingErr: fmt.Errorf("some error"), + BuildID: "build-123", + PRNumber: "pr-123", + }, + want: strings.Join( + []string{ + "$\\textcolor{red}{\\textsf{Errors occurred during REPLAYING mode. Please fix them to complete your PR.}}$", + "", + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/replaying_test.log)", + }, + "\n", + ), + }, + { + name: "without replay error", + data: withoutReplayFailedTests{ + BuildID: "build-123", + PRNumber: "pr-123", + }, + want: strings.Join( + []string{ + "$\\textcolor{green}{\\textsf{All tests passed!}}$", + "", + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/replaying_test.log)", + }, + "\n", + ), + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatWithoutReplayFailedTests(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { + t.Errorf("formatWithoutReplayFailedTests() returned unexpected difference (-want +got):\n%s", diff) + } + }) + } +} + +func TestRecordReplay(t *testing.T) { + tests := []struct { + name string 
+ data recordReplay + want string + }{ + { + name: "ReplayingAfterRecordingResult has failed tests", + data: recordReplay{ + RecordingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + FailedTests: []string{"d", "e"}, + }, + ReplayingAfterRecordingResult: &vcr.Result{ + PassedTests: []string{"a"}, + FailedTests: []string{"b", "c"}, + }, + HasTerminatedTests: true, + RecordingErr: fmt.Errorf("some error"), + BuildID: "build-123", + PRNumber: "pr-123", + }, + want: strings.Join( + []string{ + "$\\textcolor{green}{\\textsf{Tests passed during RECORDING mode:}}$", "`a`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/a.log)]", + "`b`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/b.log)]", + "`c`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/c.log)]", + "$\\textcolor{red}{\\textsf{Tests failed when rerunning REPLAYING mode:}}$", + "`b`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/replaying_build_after_recording/b_replaying_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/replaying_after_recording/b.log)]", + "`c`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/replaying_build_after_recording/c_replaying_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/replaying_after_recording/c.log)]", + "", + "", + "Tests failed due to non-determinism or randomness when the VCR replayed the response after the HTTP request was made.", + "", + "Please fix these to complete your PR. 
If you believe these test failures to be incorrect or unrelated to your change, or if you have any questions, please raise the concern with your reviewer.", + "", + "", + "---", + "", + "", + "$\\textcolor{red}{\\textsf{Tests failed during RECORDING mode:}}$", + "`d`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/recording_build/d_recording_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/d.log)]", + "`e`[[Error message](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/recording_build/e_recording_test.log)] [[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/e.log)]", + "", + "$\\textcolor{red}{\\textsf{Several tests got terminated during RECORDING mode.}}$", + "$\\textcolor{red}{\\textsf{Errors occurred during RECORDING mode. 
Please fix them to complete your PR.}}$", + "", + "", + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/recording_test.log) or the [debug log](https://console.cloud.google.com/storage/browser/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording) for each test", + }, + "\n", + ), + }, + { + name: "ReplayingAfterRecordingResult does not have failed tests", + data: recordReplay{ + RecordingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + }, + ReplayingAfterRecordingResult: &vcr.Result{ + PassedTests: []string{"a", "b", "c"}, + }, + AllRecordingPassed: true, + BuildID: "build-123", + PRNumber: "pr-123", + }, + want: strings.Join( + []string{ + "$\\textcolor{green}{\\textsf{Tests passed during RECORDING mode:}}$", "`a`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/a.log)]", + "`b`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/b.log)]", + "`c`[[Debug log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording/c.log)]", + "", + "$\\textcolor{green}{\\textsf{No issues found for passed tests after REPLAYING rerun.}}$", + "", + "---", + "", + "", + "", + "", + "$\\textcolor{green}{\\textsf{All tests passed!}}$", + "", + "View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/build-log/recording_test.log) or the [debug log](https://console.cloud.google.com/storage/browser/ci-vcr-logs/beta/refs/heads/auto-pr-pr-123/artifacts/build-123/recording) for each test", + }, + "\n", + ), + }, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + got, err := formatRecordReplay(tc.data) + if err != nil { + t.Fatalf("Failed to format comment: %v", err) + } + if diff := cmp.Diff(tc.want, got); diff != "" { 
+ t.Errorf("formatRecordReplay() returned unexpected difference (-want +got):\n%s", diff) + } + }) + } +} diff --git a/.ci/magician/cmd/test_terraform_vcr_test_analytics.tmpl b/.ci/magician/cmd/test_terraform_vcr_test_analytics.tmpl new file mode 100644 index 000000000000..9c4006541946 --- /dev/null +++ b/.ci/magician/cmd/test_terraform_vcr_test_analytics.tmpl @@ -0,0 +1,20 @@ +#### Tests analytics +Total tests: {{add (add (len .ReplayingResult.PassedTests) (len .ReplayingResult.SkippedTests)) (len .ReplayingResult.FailedTests) }} +Passed tests: {{len .ReplayingResult.PassedTests}} +Skipped tests: {{len .ReplayingResult.SkippedTests}} +Affected tests: {{len .ReplayingResult.FailedTests}} + +
+Click here to see the affected service packages +
+{{if .RunFullVCR}} +All service packages are affected +{{else if gt (len .AffectedServices) 0}} + +{{else}} +None +{{end}} +
+
diff --git a/.ci/magician/cmd/test_terraform_vcr_with_replay_failed_tests.tmpl b/.ci/magician/cmd/test_terraform_vcr_with_replay_failed_tests.tmpl new file mode 100644 index 000000000000..68c804f67584 --- /dev/null +++ b/.ci/magician/cmd/test_terraform_vcr_with_replay_failed_tests.tmpl @@ -0,0 +1,12 @@ +#### Action taken +
+Found {{len .ReplayingResult.FailedTests}} affected test(s) by replaying old test recordings. Starting RECORDING based on the most recent commit. Click here to see the affected tests + +
+ +
+
+ +[Get to know how VCR tests work](https://googlecloudplatform.github.io/magic-modules/docs/getting-started/contributing/#general-contributing-steps) diff --git a/.ci/magician/cmd/test_terraform_vcr_without_replay_failed_tests.tmpl b/.ci/magician/cmd/test_terraform_vcr_without_replay_failed_tests.tmpl new file mode 100644 index 000000000000..9c342f06f610 --- /dev/null +++ b/.ci/magician/cmd/test_terraform_vcr_without_replay_failed_tests.tmpl @@ -0,0 +1,7 @@ +{{- if .ReplayingErr -}} +$\textcolor{red}{\textsf{Errors occurred during REPLAYING mode. Please fix them to complete your PR.}}$ +{{- else -}} +$\textcolor{green}{\textsf{All tests passed!}}$ +{{- end}} + +View the [build log](https://storage.cloud.google.com/ci-vcr-logs/beta/refs/heads/auto-pr-{{.PRNumber}}/artifacts/{{.BuildID}}/build-log/replaying_test.log) diff --git a/GNUmakefile b/GNUmakefile index 68ee86de465d..a462c011415e 100644 --- a/GNUmakefile +++ b/GNUmakefile @@ -137,4 +137,5 @@ doctor: refresh-go: cd mmv1;\ bundle exec compiler.rb -e terraform -o $(OUTPUT_PATH) -v $(VERSION) $(mmv1_compile) --go-yaml; \ - go run . --yaml --template \ No newline at end of file + go run . --yaml --template; \ + go run . --yaml --handwritten \ No newline at end of file diff --git a/docs/content/develop/breaking-changes/make-a-breaking-change.md b/docs/content/develop/breaking-changes/make-a-breaking-change.md index 7fe9526760fd..a7ead0be56bf 100644 --- a/docs/content/develop/breaking-changes/make-a-breaking-change.md +++ b/docs/content/develop/breaking-changes/make-a-breaking-change.md @@ -63,7 +63,7 @@ The general process for contributing a breaking change to the 1. Make the `main` branch forwards-compatible with the major release 2. Add deprecations and warnings to the `main` branch of `magic-modules` 3. Add upgrade guide entries to the `main` branch of `magic-modules` -4. Make the breaking change on ~~`FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}`~~ `main` temporarily +4. 
Make the breaking change on `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` These are covered in more detail in the following sections. The upgrade guide and the actual breaking change will be merged only after both are completed. @@ -184,11 +184,36 @@ The upgrade guide and the actual breaking change will be merged only after both ### Make the breaking change on `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` -> [!CAUTION] -> `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` is not yet ready. If you want to make your -> breaking change ahead of time (possibly for early review), please submit a PR on `main` with the title prefix "6.0.0 - ". -> Ensure that a Github Issue is created as per all PR's, and our team will manually switch your PR over to -> `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} when it is ready. +When working on your breaking change, make sure that your base branch +is `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}`. This +means that you will follow the standard +[contribution process]({{< ref "/get-started/contribution-process" >}}) +with the following changes: + +1. Before you start, check out and sync your local `magic-modules` and provider + repositories with the upstream major release branches. + ```bash + cd ~/magic-modules + git checkout FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + git pull --ff-only origin FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google + git checkout FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + git pull --ff-only origin FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + cd $GOPATH/src/github.com/hashicorp/terraform-provider-google-beta + git checkout FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + git pull --ff-only origin FEATURE-BRANCH-major-release-{{% param "majorVersion" %}} + ``` +1. 
Make sure that any deprecation notices and warnings that you added in previous sections + are present on the major release branch. Changes to the `main` branch will be + merged into the major release branch every Monday. +1. Make the breaking change. +1. Remove any deprecation notices and warnings (including in documentation) not already removed by the breaking change. +1. When you create your pull request, + [change the base branch](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/changing-the-base-branch-of-a-pull-request) + to `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` +1. To resolve merge conflicts with `git rebase` or `git merge`, use `FEATURE-BRANCH-major-release-{{% param "majorVersion" %}}` instead of `main`. + +The upgrade guide and the actual breaking change will be merged only after both are completed. ## What's next? diff --git a/mmv1/api/product.go b/mmv1/api/product.go index c7fa4705057a..d02a5ac424d2 100644 --- a/mmv1/api/product.go +++ b/mmv1/api/product.go @@ -69,8 +69,6 @@ type Product struct { } func (p *Product) UnmarshalYAML(n *yaml.Node) error { - p.Async = NewAsync() - type productAlias Product aliasObj := (*productAlias)(p) diff --git a/mmv1/api/resource.go b/mmv1/api/resource.go index 71d768a65630..ea122cd6ef82 100644 --- a/mmv1/api/resource.go +++ b/mmv1/api/resource.go @@ -17,6 +17,7 @@ import ( "maps" "regexp" "strings" + "sort" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/product" "github.com/GoogleCloudPlatform/magic-modules/mmv1/api/resource" @@ -887,11 +888,11 @@ func (r Resource) HasZone() bool { // resource functions needed for template that previously existed in terraform.go but due to how files are being inherited here it was easier to put in here // taken wholesale from tpgtools func (r Resource) Updatable() bool { - if r.Immutable && !r.RootLabels() { - return false + if !r.Immutable { + return true } for _, p := range 
r.AllProperties() {
-		if !p.Immutable && !(p.Required && p.DefaultFromApi) {
+		if p.UpdateUrl != "" {
 			return true
 		}
 	}
@@ -1467,6 +1468,24 @@ func (r Resource) PropertiesByCustomUpdate() map[UpdateGroup][]*Type {
 	return groupedCustomUpdateProps
 }
 
+func (r Resource) PropertiesByCustomUpdateGroups() []UpdateGroup {
+	customUpdateProps := r.propertiesWithCustomUpdate(r.RootProperties())
+	var updateGroups []UpdateGroup
+	for _, prop := range customUpdateProps {
+		groupedProperty := UpdateGroup{UpdateUrl: prop.UpdateUrl,
+			UpdateVerb:      prop.UpdateVerb,
+			UpdateId:        prop.UpdateId,
+			FingerprintName: prop.FingerprintName}
+
+		if slices.Contains(updateGroups, groupedProperty) {
+			continue
+		}
+		updateGroups = append(updateGroups, groupedProperty)
+	}
+	sort.Slice(updateGroups, func(i, j int) bool { return updateGroups[i].UpdateId < updateGroups[j].UpdateId })
+	return updateGroups
+}
+
 func (r Resource) FieldSpecificUpdateMethods() bool {
 	return (len(r.PropertiesByCustomUpdate()) > 0)
 }
diff --git a/mmv1/api/type.go b/mmv1/api/type.go
index 572e58c4014e..8059e57beead 100644
--- a/mmv1/api/type.go
+++ b/mmv1/api/type.go
@@ -138,9 +138,9 @@ type Type struct {
 	// ====================
 	// Array Fields
 	// ====================
-	ItemType *Type `yaml:"item_type"`
-	MinSize  int   `yaml:"min_size"`
-	MaxSize  int   `yaml:"max_size"`
+	ItemType *Type  `yaml:"item_type"`
+	MinSize  string `yaml:"min_size"`
+	MaxSize  string `yaml:"max_size"`
 
 	// __name
 	ParentName string
diff --git a/mmv1/description-copy.go b/mmv1/description-copy.go
index 5e16c0758e4d..0294174e95c8 100644
--- a/mmv1/description-copy.go
+++ b/mmv1/description-copy.go
@@ -10,8 +10,24 @@ import (
 	"strings"
 )
 
+func CopyAllDescriptions() {
+	identifiers := []string{
+		"description:",
+		"note:",
+		"set_hash_func:",
+		"warning:",
+		"required_properties:",
+		"optional_properties:",
+		"attributes:",
+	}
+
+	for i, id := range identifiers {
+		CopyText(id, len(identifiers)-1 == i)
+	}
+}
+
 // Used to copy/paste text from Ruby -> Go YAML 
files -func CopyText(identifier string) { +func CopyText(identifier string, last bool) { var allProductFiles []string = make([]string, 0) files, err := filepath.Glob("products/**/go_product.yaml") if err != nil { @@ -92,12 +108,14 @@ func CopyText(identifier string) { for scanner.Scan() { line := scanner.Text() if firstLine { - if line != "NOT CONVERTED - RUN YAML MODE" { - // log.Printf("skipping %s", goPath) - break - } else { + if strings.Contains(line, "NOT CONVERTED - RUN YAML MODE") { firstLine = false + if !last { + w.WriteString(fmt.Sprintf("NOT CONVERTED - RUN YAML MODE\n")) + } continue + } else { + break } } if strings.Contains(line, identifier) { diff --git a/mmv1/google/template_utils.go b/mmv1/google/template_utils.go index ac47f6bf3374..1053894233ba 100644 --- a/mmv1/google/template_utils.go +++ b/mmv1/google/template_utils.go @@ -43,6 +43,12 @@ func subtract(a, b int) int { return a - b } +// plus returns the sum of a and b +// and used in Go templates +func plus(a, b int) int { + return a + b +} + var TemplateFunctions = template.FuncMap{ "title": SpaceSeparatedTitle, "replace": strings.Replace, @@ -58,5 +64,6 @@ var TemplateFunctions = template.FuncMap{ "format2regex": Format2Regex, "hasPrefix": strings.HasPrefix, "sub": subtract, + "plus": plus, "firstSentence": FirstSentence, } diff --git a/mmv1/main.go b/mmv1/main.go index 284ab3383b0c..851f05737390 100644 --- a/mmv1/main.go +++ b/mmv1/main.go @@ -41,8 +41,7 @@ func main() { flag.Parse() if *yamlMode { - CopyText("description:") - CopyText("note:") + CopyAllDescriptions() } if *templateMode { @@ -107,7 +106,11 @@ func main() { return false }) - var productsForVersion []map[string]interface{} + // In order to only copy/compile files once per provider this must be called outside + // of the products loop. 
This will get called with the provider from the final iteration + // of the loop + var providerToGenerate *provider.Terraform + var productsForVersion []*api.Product for _, productName := range allProductFiles { productYamlPath := path.Join(productName, "go_product.yaml") @@ -165,7 +168,9 @@ func main() { productApi.Validate() // TODO Q2: set other providers via flag - providerToGenerate := provider.NewTerraform(productApi, *version, startTime) + providerToGenerate = provider.NewTerraform(productApi, *version, startTime) + + productsForVersion = append(productsForVersion, productApi) if !slices.Contains(productsToGenerate, productName) { log.Printf("%s not specified, skipping generation", productName) @@ -174,32 +179,18 @@ func main() { log.Printf("%s: Generating files", productName) providerToGenerate.Generate(*outputPath, productName, generateCode, generateDocs) - - // we need to preserve a single provider instance to use outside of this loop. - productsForVersion = append(productsForVersion, map[string]interface{}{ - "Definitions": productApi, - "Provider": providerToGenerate, - }) } - - // TODO Q2: copy common files } - slices.SortFunc(productsForVersion, func(p1, p2 map[string]interface{}) int { - return strings.Compare(strings.ToLower(p1["Definitions"].(*api.Product).Name), strings.ToLower(p2["Definitions"].(*api.Product).Name)) + slices.SortFunc(productsForVersion, func(p1, p2 *api.Product) int { + return strings.Compare(strings.ToLower(p1.Name), strings.ToLower(p2.Name)) }) - // In order to only copy/compile files once per provider this must be called outside - // of the products loop. 
This will get called with the provider from the final iteration - // of the loop - finalProduct := productsForVersion[len(productsForVersion)-1] - provider := finalProduct["Provider"].(*provider.Terraform) - - provider.CopyCommonFiles(*outputPath, generateCode, generateDocs) + providerToGenerate.CopyCommonFiles(*outputPath, generateCode, generateDocs) log.Printf("Compiling common files for terraform") if generateCode { - provider.CompileCommonFiles(*outputPath, productsForVersion, "") + providerToGenerate.CompileCommonFiles(*outputPath, productsForVersion, "") // TODO Q2: product overrides } diff --git a/mmv1/products/accesscontextmanager/ServicePerimeters.yaml b/mmv1/products/accesscontextmanager/ServicePerimeters.yaml index 2b941289d41a..98896714991d 100644 --- a/mmv1/products/accesscontextmanager/ServicePerimeters.yaml +++ b/mmv1/products/accesscontextmanager/ServicePerimeters.yaml @@ -57,6 +57,7 @@ properties: name: 'servicePerimeters' description: | The desired Service Perimeters that should replace all existing Service Perimeters in the Access Policy. 
+ custom_flatten: 'templates/terraform/custom_flatten/accesscontextmanager_serviceperimeters_custom_flatten.go.erb' item_type: !ruby/object:Api::Type::NestedObject properties: - !ruby/object:Api::Type::String diff --git a/mmv1/products/cloudfunctions2/Function.yaml b/mmv1/products/cloudfunctions2/Function.yaml index 1027898e7e16..1f2fe20fb289 100644 --- a/mmv1/products/cloudfunctions2/Function.yaml +++ b/mmv1/products/cloudfunctions2/Function.yaml @@ -51,6 +51,8 @@ import_format: ['projects/{{project}}/locations/{{location}}/functions/{{name}}'] taint_resource_on_failed_create: true autogen_async: true +custom_code: !ruby/object:Provider::Terraform::CustomCode + encoder: 'templates/terraform/encoders/cloudfunctions2_runtime_update_policy.go.erb' examples: - !ruby/object:Provider::Terraform::Examples name: 'cloudfunctions2_basic' @@ -277,6 +279,48 @@ examples: unencoded-ar-repo: 'ar-repo' kms_key_name: 'cmek-key' project: 'my-project-name' + - !ruby/object:Provider::Terraform::Examples + name: 'cloudfunctions2_abiu' + primary_resource_id: 'function' + min_version: beta + vars: + bucket_name: 'gcf-source' + service_account: 'gcf-sa' + topic: 'functions2-topic' + function: 'gcf-function' + zip_path: 'function-source.zip' + test_env_vars: + project: :PROJECT_NAME + test_vars_overrides: + zip_path: '"./test-fixtures/function-source-pubsub.zip"' + primary_resource_id: '"terraform-test"' + location: + '"europe-west6"' + # ignore these fields during import step + ignore_read_extra: + - 'build_config.0.source.0.storage_source.0.object' + - 'build_config.0.source.0.storage_source.0.bucket' + - !ruby/object:Provider::Terraform::Examples + name: 'cloudfunctions2_abiu_on_deploy' + primary_resource_id: 'function' + min_version: beta + vars: + bucket_name: 'gcf-source' + service_account: 'gcf-sa' + topic: 'functions2-topic' + function: 'gcf-function' + zip_path: 'function-source.zip' + test_env_vars: + project: :PROJECT_NAME + test_vars_overrides: + zip_path: 
'"./test-fixtures/function-source-pubsub.zip"' + primary_resource_id: '"terraform-test"' + location: + '"europe-west6"' + # ignore these fields during import step + ignore_read_extra: + - 'build_config.0.source.0.storage_source.0.object' + - 'build_config.0.source.0.storage_source.0.bucket' iam_policy: !ruby/object:Api::Resource::IamPolicy parent_resource_attribute: 'cloud_function' method_name_separator: ':' @@ -448,6 +492,33 @@ properties: name: 'serviceAccount' description: 'The fully-qualified name of the service account to be used for building the container.' default_from_api: true + - !ruby/object:Api::Type::NestedObject + name: 'automaticUpdatePolicy' + description: | + Security patches are applied automatically to the runtime without requiring + the function to be redeployed. + exactly_one_of: + - automatic_update_policy + - on_deploy_update_policy + send_empty_value: true + allow_empty_object: true + default_from_api: true + properties: [] + - !ruby/object:Api::Type::NestedObject + name: 'onDeployUpdatePolicy' + description: | + Security patches are only applied when a function is redeployed. + exactly_one_of: + - automatic_update_policy + - on_deploy_update_policy + send_empty_value: true + allow_empty_object: true + properties: + - !ruby/object:Api::Type::String + name: 'runtimeVersion' + output: true + description: | + The runtime version which was used during latest function deployment. - !ruby/object:Api::Type::NestedObject name: 'serviceConfig' description: 'Describes the Service being deployed.' diff --git a/mmv1/products/compute/Address.yaml b/mmv1/products/compute/Address.yaml index e137f2286548..de0f0398c124 100644 --- a/mmv1/products/compute/Address.yaml +++ b/mmv1/products/compute/Address.yaml @@ -165,7 +165,6 @@ properties: configure Private Service Connect. Only global internal addresses can use this purpose. - This should only be set when using an Internal address. 
default_from_api: true - !ruby/object:Api::Type::Enum diff --git a/mmv1/products/compute/BackendService.yaml b/mmv1/products/compute/BackendService.yaml index 150fa6c33361..5a7539e956ad 100644 --- a/mmv1/products/compute/BackendService.yaml +++ b/mmv1/products/compute/BackendService.yaml @@ -834,7 +834,6 @@ properties: UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. - This field is applicable to either: * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, @@ -844,7 +843,6 @@ properties: Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. - If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. diff --git a/mmv1/products/compute/ForwardingRule.yaml b/mmv1/products/compute/ForwardingRule.yaml index e2cfde7e7d5d..4e6f3b6655ab 100644 --- a/mmv1/products/compute/ForwardingRule.yaml +++ b/mmv1/products/compute/ForwardingRule.yaml @@ -286,7 +286,6 @@ properties: * When the `target` is a Private Service Connect Google APIs bundle, you must specify an `IPAddress`. - Otherwise, you can optionally specify an IP address that references an existing static (reserved) IP address resource. When omitted, Google Cloud assigns an ephemeral IP address. @@ -304,7 +303,6 @@ properties: * `global/addresses/address-name` * `address-name` - The forwarding rule's `target` or `backendService`, and in most cases, also the `loadBalancingScheme`, determine the type of IP address that you can use. For detailed information, see @@ -494,7 +492,6 @@ properties: * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). 
- For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. update_verb: :POST update_url: 'projects/{{project}}/regions/{{region}}/forwardingRules/{{name}}/setTarget' diff --git a/mmv1/products/compute/GlobalForwardingRule.yaml b/mmv1/products/compute/GlobalForwardingRule.yaml index b287c5445545..af9a88d4d512 100644 --- a/mmv1/products/compute/GlobalForwardingRule.yaml +++ b/mmv1/products/compute/GlobalForwardingRule.yaml @@ -238,7 +238,6 @@ properties: * When the `target` is a Private Service Connect Google APIs bundle, you must specify an `IPAddress`. - Otherwise, you can optionally specify an IP address that references an existing static (reserved) IP address resource. When omitted, Google Cloud assigns an ephemeral IP address. @@ -256,7 +255,6 @@ properties: * `global/addresses/address-name` * `address-name` - The forwarding rule's `target`, and in most cases, also the `loadBalancingScheme`, determine the type of IP address that you can use. For detailed information, see @@ -479,7 +477,6 @@ properties: * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). - For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. update_verb: :POST update_url: 'projects/{{project}}/global/forwardingRules/{{name}}/setTarget' diff --git a/mmv1/products/compute/HttpHealthCheck.yaml b/mmv1/products/compute/HttpHealthCheck.yaml index a49f63feb4bc..e826da96bf2b 100644 --- a/mmv1/products/compute/HttpHealthCheck.yaml +++ b/mmv1/products/compute/HttpHealthCheck.yaml @@ -21,7 +21,6 @@ description: | An HttpHealthCheck resource. This resource defines a template for how individual VMs should be checked for health, via HTTP. 
- ~> **Note:** google_compute_http_health_check is a legacy health check. The newer [google_compute_health_check](/docs/providers/google/r/compute_health_check.html) should be preferred for all uses except diff --git a/mmv1/products/compute/HttpsHealthCheck.yaml b/mmv1/products/compute/HttpsHealthCheck.yaml index c8ca925f0989..aeb352b1f5f8 100644 --- a/mmv1/products/compute/HttpsHealthCheck.yaml +++ b/mmv1/products/compute/HttpsHealthCheck.yaml @@ -21,7 +21,6 @@ description: | An HttpsHealthCheck resource. This resource defines a template for how individual VMs should be checked for health, via HTTPS. - ~> **Note:** google_compute_https_health_check is a legacy health check. The newer [google_compute_health_check](/docs/providers/google/r/compute_health_check.html) should be preferred for all uses except diff --git a/mmv1/products/compute/Instance.yaml b/mmv1/products/compute/Instance.yaml index e3912dac2c84..d316af142a77 100644 --- a/mmv1/products/compute/Instance.yaml +++ b/mmv1/products/compute/Instance.yaml @@ -526,7 +526,6 @@ properties: - !ruby/object:Api::Type::ResourceRef name: 'networkAttachment' resource: 'networkAttachment' - min_version: beta imports: 'selfLink' description: | The URL of the network attachment that this interface should connect to in the following format: diff --git a/mmv1/products/compute/ManagedSslCertificate.yaml b/mmv1/products/compute/ManagedSslCertificate.yaml index 106e07944520..1975c56387a5 100644 --- a/mmv1/products/compute/ManagedSslCertificate.yaml +++ b/mmv1/products/compute/ManagedSslCertificate.yaml @@ -117,7 +117,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. 
- !ruby/object:Api::Type::NestedObject name: 'managed' diff --git a/mmv1/products/compute/NetworkAttachment.yaml b/mmv1/products/compute/NetworkAttachment.yaml index 95eb9d46418a..dcfe283e0ca6 100644 --- a/mmv1/products/compute/NetworkAttachment.yaml +++ b/mmv1/products/compute/NetworkAttachment.yaml @@ -13,7 +13,6 @@ --- !ruby/object:Api::Resource name: 'NetworkAttachment' -min_version: beta kind: 'compute#networkAttachment' description: | A network attachment is a resource that lets a producer Virtual Private Cloud (VPC) network initiate connections to a consumer VPC network through a Private Service Connect interface. diff --git a/mmv1/products/compute/RegionBackendService.yaml b/mmv1/products/compute/RegionBackendService.yaml index 4cb367ee521d..9b62faaad6f8 100644 --- a/mmv1/products/compute/RegionBackendService.yaml +++ b/mmv1/products/compute/RegionBackendService.yaml @@ -842,7 +842,6 @@ properties: UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. - This field is applicable to either: * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, @@ -852,7 +851,6 @@ properties: Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. - If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. diff --git a/mmv1/products/compute/RegionSslCertificate.yaml b/mmv1/products/compute/RegionSslCertificate.yaml index fc3e6c825658..ba2941812207 100644 --- a/mmv1/products/compute/RegionSslCertificate.yaml +++ b/mmv1/products/compute/RegionSslCertificate.yaml @@ -128,7 +128,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. 
default_from_api: true custom_expand: 'templates/terraform/custom_expand/name_or_name_prefix.go.erb' diff --git a/mmv1/products/compute/Reservation.yaml b/mmv1/products/compute/Reservation.yaml index 55285cfcbe91..eee584be87b5 100644 --- a/mmv1/products/compute/Reservation.yaml +++ b/mmv1/products/compute/Reservation.yaml @@ -141,6 +141,7 @@ properties: description: | Type of sharing for this shared-reservation default_from_api: true + immutable: true - !ruby/object:Api::Type::Map name: 'projectMap' description: | diff --git a/mmv1/products/compute/SslCertificate.yaml b/mmv1/products/compute/SslCertificate.yaml index 0329f36edfb4..f00bbfe1d91c 100644 --- a/mmv1/products/compute/SslCertificate.yaml +++ b/mmv1/products/compute/SslCertificate.yaml @@ -115,7 +115,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. default_from_api: true custom_expand: 'templates/terraform/custom_expand/name_or_name_prefix.go.erb' diff --git a/mmv1/products/compute/TargetHttpsProxy.yaml b/mmv1/products/compute/TargetHttpsProxy.yaml index 50d1983c027b..a22e5c8cc870 100644 --- a/mmv1/products/compute/TargetHttpsProxy.yaml +++ b/mmv1/products/compute/TargetHttpsProxy.yaml @@ -130,6 +130,18 @@ properties: update_url: 'projects/{{project}}/global/targetHttpsProxies/{{name}}/setQuicOverride' default_value: :NONE custom_flatten: 'templates/terraform/custom_flatten/default_if_empty.erb' + - !ruby/object:Api::Type::Enum + name: 'tlsEarlyData' + description: | + Specifies whether TLS 1.3 0-RTT Data (“Early Data”) should be accepted for this service. + Early Data allows a TLS resumption handshake to include the initial application payload + (a HTTP request) alongside the handshake, reducing the effective round trips to “zero”. + This applies to TLS 1.3 connections over TCP (HTTP/2) as well as over UDP (QUIC/h3). 
+ values: + - :STRICT + - :PERMISSIVE + - :DISABLED + default_from_api: true - !ruby/object:Api::Type::Array name: 'certificateManagerCertificates' description: | diff --git a/mmv1/products/compute/UrlMap.yaml b/mmv1/products/compute/UrlMap.yaml index d3dad5414195..5b0cda5141d1 100644 --- a/mmv1/products/compute/UrlMap.yaml +++ b/mmv1/products/compute/UrlMap.yaml @@ -124,6 +124,16 @@ examples: http_health_check_name: "health-check" backend_bucket_name: "static-asset-backend-bucket" storage_bucket_name: "static-asset-bucket" + - !ruby/object:Provider::Terraform::Examples + name: "url_map_custom_error_response_policy" + primary_resource_id: "urlmap" + min_version: beta + vars: + url_map_name: "urlmap" + backend_service_name: "login" + http_health_check_name: "health-check" + storage_bucket_name: "static-asset-bucket" + error_backend_bucket_name: "error-backend-bucket" properties: - !ruby/object:Api::Type::Time name: 'creationTimestamp' @@ -304,6 +314,66 @@ properties: description: | An optional description of this resource. Provide this property when you create the resource. + - !ruby/object:Api::Type::NestedObject + name: 'defaultCustomErrorResponsePolicy' + min_version: beta + description: | + defaultCustomErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendServiceor BackendBucket responds with an error. + + This policy takes effect at the PathMatcher level and applies only when no policy has been defined for the error code at lower levels like RouteRule and PathRule within this PathMatcher. If an error code does not have a policy defined in defaultCustomErrorResponsePolicy, then a policy defined for the error code in UrlMap.defaultCustomErrorResponsePolicy takes effect. + + For example, consider a UrlMap with the following configuration: + + UrlMap.defaultCustomErrorResponsePolicy is configured with policies for 5xx and 4xx errors + A RouteRule for /coming_soon/ is configured for the error code 404. 
+ If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in RouteRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. + + When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. + + defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. + properties: + - !ruby/object:Api::Type::Array + name: 'errorResponseRule' + api_name: errorResponseRules + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Array + name: 'matchResponseCodes' + description: | + Valid values include: + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. 
+ - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: Api::Type::String + - !ruby/object:Api::Type::String + name: 'path' + description: | + The full path to a file within backendBucket . For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters + - !ruby/object:Api::Type::Integer + name: 'overrideResponseCode' + description: | + The HTTP status code returned with the response containing the custom error content. + If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - !ruby/object:Api::Type::ResourceRef + name: 'errorService' + resource: 'BackendBucket' + imports: 'selfLink' + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. Examples are: + https://www.googleapis.com/compute/v1/projects/project/global/backendBuckets/myBackendBucket + compute/v1/projects/project/global/backendBuckets/myBackendBucket + global/backendBuckets/myBackendBucket + If errorService is not specified at lower levels like pathMatcher, pathRule and routeRule, an errorService specified at a higher level in the UrlMap will be used. If UrlMap.defaultCustomErrorResponsePolicy contains one or more errorResponseRules[], it must specify errorService. 
+            If load balancer cannot reach the backendBucket, a simple Not Found Error will be returned, with the original response code (or overrideResponseCode if configured).
   - !ruby/object:Api::Type::NestedObject
     name: 'headerAction'
     description: |
@@ -403,6 +473,64 @@ properties:
               \* is allowed is at the end following a /. The string fed to the
               path matcher does not include any text after the first ? or #,
               and those chars are not allowed here.
+          - !ruby/object:Api::Type::NestedObject
+            name: 'customErrorResponsePolicy'
+            min_version: beta
+            description: |
+              customErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendService or BackendBucket responds with an error.
+              If a policy for an error code is not configured for the PathRule, a policy for the error code configured in pathMatcher.defaultCustomErrorResponsePolicy is applied. If one is not specified in pathMatcher.defaultCustomErrorResponsePolicy, the policy configured in UrlMap.defaultCustomErrorResponsePolicy takes effect.
+              For example, consider a UrlMap with the following configuration:
+              UrlMap.defaultCustomErrorResponsePolicy are configured with policies for 5xx and 4xx errors
+              A PathRule for /coming_soon/ is configured for the error code 404.
+              If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in PathRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect.
+              customErrorResponsePolicy is supported only for global external Application Load Balancers. 
+ properties: + - !ruby/object:Api::Type::Array + name: 'errorResponseRule' + api_name: errorResponseRules + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. + item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Array + name: 'matchResponseCodes' + description: | + Valid values include: + + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. + - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: Api::Type::String + - !ruby/object:Api::Type::String + name: 'path' + description: | + The full path to a file within backendBucket . For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters + - !ruby/object:Api::Type::Integer + name: 'overrideResponseCode' + description: | + The HTTP status code returned with the response containing the custom error content. 
+ If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - !ruby/object:Api::Type::ResourceRef + name: "errorService" + resource: "BackendBucket" + imports: 'selfLink' + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. Examples are: + + https://www.googleapis.com/compute/v1/projects/project/global/backendBuckets/myBackendBucket + compute/v1/projects/project/global/backendBuckets/myBackendBucket + global/backendBuckets/myBackendBucket + + If errorService is not specified at lower levels like pathMatcher, pathRule and routeRule, an errorService specified at a higher level in the UrlMap will be used. If UrlMap.defaultCustomErrorResponsePolicy contains one or more errorResponseRules[], it must specify errorService. + If load balancer cannot reach the backendBucket, a simple Not Found Error will be returned, with the original response code (or overrideResponseCode if configured). - !ruby/object:Api::Type::NestedObject name: 'routeAction' description: | @@ -1915,6 +2043,68 @@ properties: The value must be between 0.0 and 100.0 inclusive. validation: !ruby/object:Provider::Terraform::Validation function: 'validation.FloatBetween(0, 100)' + - !ruby/object:Api::Type::NestedObject + name: 'defaultCustomErrorResponsePolicy' + min_version: beta + description: | + defaultCustomErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendService or BackendBucket responds with an error. + + This policy takes effect at the PathMatcher level and applies only when no policy has been defined for the error code at lower levels like RouteRule and PathRule within this PathMatcher. If an error code does not have a policy defined in defaultCustomErrorResponsePolicy, then a policy defined for the error code in UrlMap.defaultCustomErrorResponsePolicy takes effect. 
+ + For example, consider a UrlMap with the following configuration: + + UrlMap.defaultCustomErrorResponsePolicy is configured with policies for 5xx and 4xx errors + A RouteRule for /coming_soon/ is configured for the error code 404. + If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in RouteRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. + + When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. + + defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. + properties: + - !ruby/object:Api::Type::Array + name: 'errorResponseRule' + api_name: errorResponseRules + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. 
+ item_type: !ruby/object:Api::Type::NestedObject + properties: + - !ruby/object:Api::Type::Array + name: 'matchResponseCodes' + description: | + Valid values include: + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. + - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: Api::Type::String + - !ruby/object:Api::Type::String + name: 'path' + description: | + The full path to a file within backendBucket. For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters. + - !ruby/object:Api::Type::Integer + name: 'overrideResponseCode' + description: | + The HTTP status code returned with the response containing the custom error content. + If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - !ruby/object:Api::Type::ResourceRef + name: "errorService" + resource: "BackendBucket" + imports: 'selfLink' + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. 
Examples are: + + https://www.googleapis.com/compute/v1/projects/project/global/backendBuckets/myBackendBucket + compute/v1/projects/project/global/backendBuckets/myBackendBucket + global/backendBuckets/myBackendBucket + + If errorService is not specified at lower levels like pathMatcher, pathRule and routeRule, an errorService specified at a higher level in the UrlMap will be used. If UrlMap.defaultCustomErrorResponsePolicy contains one or more errorResponseRules[], it must specify errorService. + If load balancer cannot reach the backendBucket, a simple Not Found Error will be returned, with the original response code (or overrideResponseCode if configured). - !ruby/object:Api::Type::Array name: "test" api_name: tests diff --git a/mmv1/products/compute/go_Address.yaml b/mmv1/products/compute/go_Address.yaml index a44d0393d9b2..beca1b1ebe45 100644 --- a/mmv1/products/compute/go_Address.yaml +++ b/mmv1/products/compute/go_Address.yaml @@ -158,7 +158,6 @@ properties: configure Private Service Connect. Only global internal addresses can use this purpose. - This should only be set when using an Internal address. default_from_api: true - name: 'networkTier' diff --git a/mmv1/products/compute/go_BackendService.yaml b/mmv1/products/compute/go_BackendService.yaml index 90a748f145cd..c13d11c4ee35 100644 --- a/mmv1/products/compute/go_BackendService.yaml +++ b/mmv1/products/compute/go_BackendService.yaml @@ -820,7 +820,6 @@ properties: UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. - This field is applicable to either: * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, @@ -830,7 +829,6 @@ properties: Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. - If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. 
diff --git a/mmv1/products/compute/go_Disk.yaml b/mmv1/products/compute/go_Disk.yaml index 1a1b1d207055..9b0f045ee0fe 100644 --- a/mmv1/products/compute/go_Disk.yaml +++ b/mmv1/products/compute/go_Disk.yaml @@ -72,7 +72,6 @@ custom_code: custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' - 'hyperDiskIopsUpdateDiffSupress' - - 'tpgresource.SetLabelsDiff' examples: - name: 'disk_basic' primary_resource_id: 'default' diff --git a/mmv1/products/compute/go_Firewall.yaml b/mmv1/products/compute/go_Firewall.yaml index 8d820debf8d2..c28d906a27b9 100644 --- a/mmv1/products/compute/go_Firewall.yaml +++ b/mmv1/products/compute/go_Firewall.yaml @@ -33,9 +33,9 @@ references: 'Official Documentation': 'https://cloud.google.com/vpc/docs/firewalls' api: 'https://cloud.google.com/compute/docs/reference/v1/firewalls' docs: - optional_properties: '* `enable_logging` - (Optional, Deprecated) This field denotes whether to enable logging for a particular firewall rule. -If logging is enabled, logs will be exported to Stackdriver. Deprecated in favor of `log_config` -' + optional_properties: | + * `enable_logging` - (Optional, Deprecated) This field denotes whether to enable logging for a particular firewall rule. + If logging is enabled, logs will be exported to Stackdriver. Deprecated in favor of `log_config` base_url: 'projects/{{project}}/global/firewalls' has_self_link: true update_verb: 'PATCH' @@ -161,6 +161,7 @@ properties: description: | An optional description of this resource. Provide this property when you create the resource. 
+ send_empty_value: true - name: 'destinationRanges' type: Array description: | diff --git a/mmv1/products/compute/go_ForwardingRule.yaml b/mmv1/products/compute/go_ForwardingRule.yaml index ef174df9b6f4..2b84b92a45a4 100644 --- a/mmv1/products/compute/go_ForwardingRule.yaml +++ b/mmv1/products/compute/go_ForwardingRule.yaml @@ -50,7 +50,6 @@ custom_code: post_create: 'templates/terraform/post_create/go/labels.tmpl' custom_diff: - 'forwardingRuleCustomizeDiff' - - 'tpgresource.SetLabelsDiff' legacy_long_form_project: true examples: - name: 'internal_http_lb_with_mig_backend' @@ -274,7 +273,6 @@ properties: * When the `target` is a Private Service Connect Google APIs bundle, you must specify an `IPAddress`. - Otherwise, you can optionally specify an IP address that references an existing static (reserved) IP address resource. When omitted, Google Cloud assigns an ephemeral IP address. @@ -292,7 +290,6 @@ properties: * `global/addresses/address-name` * `address-name` - The forwarding rule's `target` or `backendService`, and in most cases, also the `loadBalancingScheme`, determine the type of IP address that you can use. For detailed information, see @@ -477,7 +474,6 @@ properties: * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). - For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. 
update_url: 'projects/{{project}}/regions/{{region}}/forwardingRules/{{name}}/setTarget' update_verb: 'POST' diff --git a/mmv1/products/compute/go_GlobalForwardingRule.yaml b/mmv1/products/compute/go_GlobalForwardingRule.yaml index 77c4f47f2e7a..c85cfd9c7525 100644 --- a/mmv1/products/compute/go_GlobalForwardingRule.yaml +++ b/mmv1/products/compute/go_GlobalForwardingRule.yaml @@ -229,7 +229,6 @@ properties: * When the `target` is a Private Service Connect Google APIs bundle, you must specify an `IPAddress`. - Otherwise, you can optionally specify an IP address that references an existing static (reserved) IP address resource. When omitted, Google Cloud assigns an ephemeral IP address. @@ -247,7 +246,6 @@ properties: * `global/addresses/address-name` * `address-name` - The forwarding rule's `target`, and in most cases, also the `loadBalancingScheme`, determine the type of IP address that you can use. For detailed information, see @@ -467,7 +465,6 @@ properties: * `vpc-sc` - [ APIs that support VPC Service Controls](https://cloud.google.com/vpc-service-controls/docs/supported-products). * `all-apis` - [All supported Google APIs](https://cloud.google.com/vpc/docs/private-service-connect#supported-apis). - For Private Service Connect forwarding rules that forward traffic to managed services, the target must be a service attachment. required: true update_url: 'projects/{{project}}/global/forwardingRules/{{name}}/setTarget' diff --git a/mmv1/products/compute/go_HttpHealthCheck.yaml b/mmv1/products/compute/go_HttpHealthCheck.yaml index f24acd5ea12c..dca967b94c9c 100644 --- a/mmv1/products/compute/go_HttpHealthCheck.yaml +++ b/mmv1/products/compute/go_HttpHealthCheck.yaml @@ -19,7 +19,6 @@ description: | An HttpHealthCheck resource. This resource defines a template for how individual VMs should be checked for health, via HTTP. - ~> **Note:** google_compute_http_health_check is a legacy health check. 
The newer [google_compute_health_check](/docs/providers/google/r/compute_health_check.html) should be preferred for all uses except diff --git a/mmv1/products/compute/go_HttpsHealthCheck.yaml b/mmv1/products/compute/go_HttpsHealthCheck.yaml index f08af7f264dd..a186293003ad 100644 --- a/mmv1/products/compute/go_HttpsHealthCheck.yaml +++ b/mmv1/products/compute/go_HttpsHealthCheck.yaml @@ -19,7 +19,6 @@ description: | An HttpsHealthCheck resource. This resource defines a template for how individual VMs should be checked for health, via HTTPS. - ~> **Note:** google_compute_https_health_check is a legacy health check. The newer [google_compute_health_check](/docs/providers/google/r/compute_health_check.html) should be preferred for all uses except diff --git a/mmv1/products/compute/go_Instance.yaml b/mmv1/products/compute/go_Instance.yaml index 874134de2d9d..49eb97d03c47 100644 --- a/mmv1/products/compute/go_Instance.yaml +++ b/mmv1/products/compute/go_Instance.yaml @@ -510,7 +510,6 @@ properties: description: | The URL of the network attachment that this interface should connect to in the following format: projects/{projectNumber}/regions/{region_name}/networkAttachments/{network_attachment_name}. - min_version: 'beta' resource: 'networkAttachment' imports: 'selfLink' - name: 'scheduling' diff --git a/mmv1/products/compute/go_ManagedSslCertificate.yaml b/mmv1/products/compute/go_ManagedSslCertificate.yaml index 2dd937bc54e6..5f1f840af51f 100644 --- a/mmv1/products/compute/go_ManagedSslCertificate.yaml +++ b/mmv1/products/compute/go_ManagedSslCertificate.yaml @@ -27,19 +27,19 @@ references: 'Official Documentation': 'https://cloud.google.com/load-balancing/docs/ssl-certificates' api: 'https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates' docs: - warning: 'This resource should be used with extreme caution! Provisioning an SSL -certificate is complex. 
Ensure that you understand the lifecycle of a -certificate before attempting complex tasks like cert rotation automatically. -This resource will "return" as soon as the certificate object is created, -but post-creation the certificate object will go through a "provisioning" -process. The provisioning process can complete only when the domain name -for which the certificate is created points to a target pool which, itself, -points at the certificate. Depending on your DNS provider, this may take -some time, and migrating from self-managed certificates to Google-managed -certificates may entail some downtime while the certificate provisions. + warning: | + This resource should be used with extreme caution! Provisioning an SSL + certificate is complex. Ensure that you understand the lifecycle of a + certificate before attempting complex tasks like cert rotation automatically. + This resource will "return" as soon as the certificate object is created, + but post-creation the certificate object will go through a "provisioning" + process. The provisioning process can complete only when the domain name + for which the certificate is created points to a target pool which, itself, + points at the certificate. Depending on your DNS provider, this may take + some time, and migrating from self-managed certificates to Google-managed + certificates may entail some downtime while the certificate provisions. -In conclusion: Be extremely cautious. -' + In conclusion: Be extremely cautious. base_url: 'projects/{{project}}/global/sslCertificates' has_self_link: true immutable: true @@ -107,7 +107,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. 
- name: 'managed' type: NestedObject diff --git a/mmv1/products/compute/go_NetworkAttachment.yaml b/mmv1/products/compute/go_NetworkAttachment.yaml index 51700c03672c..0750d9be5a76 100644 --- a/mmv1/products/compute/go_NetworkAttachment.yaml +++ b/mmv1/products/compute/go_NetworkAttachment.yaml @@ -17,7 +17,6 @@ name: 'NetworkAttachment' kind: 'compute#networkAttachment' description: | A network attachment is a resource that lets a producer Virtual Private Cloud (VPC) network initiate connections to a consumer VPC network through a Private Service Connect interface. -min_version: 'beta' references: guides: 'Official Documentation': 'https://cloud.google.com/vpc/docs/about-network-attachments' @@ -68,13 +67,11 @@ parameters: type: String description: | Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - min_version: 'beta' required: true - name: 'region' type: ResourceRef description: | URL of the region where the network attachment resides. This field applies only to the region resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. - min_version: 'beta' required: true immutable: true default_from_api: true @@ -85,39 +82,32 @@ properties: - name: 'kind' type: String description: 'Type of the resource.' - min_version: 'beta' output: true - name: 'id' type: String description: 'The unique identifier for the resource type. The server generates this identifier.' - min_version: 'beta' output: true - name: 'creationTimestamp' type: Time description: 'Creation timestamp in RFC3339 text format.' 
- min_version: 'beta' output: true - name: 'description' type: String description: | An optional description of this resource. Provide this property when you create the resource. - min_version: 'beta' - name: 'selfLink' type: String description: 'Server-defined URL for the resource.' - min_version: 'beta' output: true - name: 'selfLinkWithId' type: String description: | Server-defined URL for this resource's resource id. - min_version: 'beta' output: true - name: 'connectionPreference' type: Enum description: | The connection preference of service attachment. The value can be set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service attachment is one that always accepts the connection from consumer forwarding rules. - min_version: 'beta' required: true enum_values: - 'ACCEPT_AUTOMATIC' @@ -127,7 +117,6 @@ properties: type: Array description: | An array of connections for all the producers connected to this network attachment. - min_version: 'beta' output: true item_type: type: NestedObject @@ -136,37 +125,31 @@ properties: type: String description: | The status of a connected endpoint to this network attachment. - min_version: 'beta' output: true - name: 'projectIdOrNum' type: String description: | The project id or number of the interface to which the IP was assigned. - min_version: 'beta' output: true - name: 'subnetwork' type: String description: | The subnetwork used to assign the IP to the producer instance network interface. - min_version: 'beta' output: true - name: 'ipAddress' type: String description: | The IPv4 address assigned to the producer instance network interface. This value will be a range in case of Serverless. - min_version: 'beta' output: true - name: 'secondaryIpCidrRanges' type: String description: | Alias IP ranges from the same subnetwork. 
- min_version: 'beta' output: true - name: 'subnetworks' type: Array description: | An array of URLs where each entry is the URL of a subnet provided by the service consumer to use for endpoints in the producers that connect to this network attachment. - min_version: 'beta' required: true custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: @@ -180,14 +163,12 @@ properties: type: Array description: | Projects that are not allowed to connect to this network attachment. The project can be specified using its id or number. - min_version: 'beta' item_type: type: String - name: 'producerAcceptLists' type: Array description: | Projects that are allowed to connect to this network attachment. The project can be specified using its id or number. - min_version: 'beta' item_type: type: String - name: 'fingerprint' @@ -195,12 +176,10 @@ properties: description: | Fingerprint of this resource. A hash of the contents stored in this object. This field is used in optimistic locking. An up-to-date fingerprint must be provided in order to patch. - min_version: 'beta' output: true - name: 'network' type: String description: | The URL of the network which the Network Attachment belongs to. Practically it is inferred by fetching the network of the first subnetwork associated. Because it is required that all the subnetworks must be from the same network, it is assured that the Network Attachment belongs to the same network as all the subnetworks. 
- min_version: 'beta' output: true diff --git a/mmv1/products/compute/go_OrganizationSecurityPolicy.yaml b/mmv1/products/compute/go_OrganizationSecurityPolicy.yaml index 7596cff9d73a..644b09c48415 100644 --- a/mmv1/products/compute/go_OrganizationSecurityPolicy.yaml +++ b/mmv1/products/compute/go_OrganizationSecurityPolicy.yaml @@ -35,6 +35,7 @@ timeouts: delete_minutes: 20 custom_code: post_create: 'templates/terraform/post_create/go/org_security_policy.go.tmpl' + post_delete: 'templates/terraform/post_delete/go/org_security_policy.go.tmpl' post_update: 'templates/terraform/post_update/go/org_security_policy.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/go/skip_delete_during_test.go.tmpl' examples: diff --git a/mmv1/products/compute/go_OrganizationSecurityPolicyAssociation.yaml b/mmv1/products/compute/go_OrganizationSecurityPolicyAssociation.yaml index 60f6628b6ba4..3101fac1305e 100644 --- a/mmv1/products/compute/go_OrganizationSecurityPolicyAssociation.yaml +++ b/mmv1/products/compute/go_OrganizationSecurityPolicyAssociation.yaml @@ -37,6 +37,7 @@ timeouts: delete_minutes: 20 custom_code: post_create: 'templates/terraform/post_create/go/org_security_policy_association.go.tmpl' + post_delete: 'templates/terraform/post_create/go/org_security_policy_association.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/go/skip_delete_during_test.go.tmpl' read_error_transform: 'transformSecurityPolicyAssociationReadError' examples: diff --git a/mmv1/products/compute/go_OrganizationSecurityPolicyRule.yaml b/mmv1/products/compute/go_OrganizationSecurityPolicyRule.yaml index 39a78b1cd8e6..0b02fc55bfc2 100644 --- a/mmv1/products/compute/go_OrganizationSecurityPolicyRule.yaml +++ b/mmv1/products/compute/go_OrganizationSecurityPolicyRule.yaml @@ -38,6 +38,7 @@ timeouts: delete_minutes: 20 custom_code: post_create: 'templates/terraform/post_create/go/org_security_policy_rule.go.tmpl' + post_delete: 
'templates/terraform/post_create/go/org_security_policy_rule.go.tmpl' post_update: 'templates/terraform/post_create/go/org_security_policy_rule.go.tmpl' test_check_destroy: 'templates/terraform/custom_check_destroy/go/skip_delete_during_test.go.tmpl' examples: diff --git a/mmv1/products/compute/go_RegionBackendService.yaml b/mmv1/products/compute/go_RegionBackendService.yaml index 13700c4f64c0..cab122136a1b 100644 --- a/mmv1/products/compute/go_RegionBackendService.yaml +++ b/mmv1/products/compute/go_RegionBackendService.yaml @@ -826,7 +826,6 @@ properties: UNAVAILABLE_WEIGHT. Otherwise, Load Balancing remains equal-weight. - This field is applicable to either: * A regional backend service with the service_protocol set to HTTP, HTTPS, or HTTP2, @@ -836,7 +835,6 @@ properties: Load Balancing). Only MAGLEV and WEIGHTED_MAGLEV values are possible for External Network Load Balancing. The default is MAGLEV. - If session_affinity is not NONE, and this field is not set to MAGLEV, WEIGHTED_MAGLEV, or RING_HASH, session affinity settings will not take effect. 
diff --git a/mmv1/products/compute/go_RegionDisk.yaml b/mmv1/products/compute/go_RegionDisk.yaml index eb9eb2ec6181..3a4d7e04b8af 100644 --- a/mmv1/products/compute/go_RegionDisk.yaml +++ b/mmv1/products/compute/go_RegionDisk.yaml @@ -70,7 +70,6 @@ custom_code: custom_diff: - 'customdiff.ForceNewIfChange("size", IsDiskShrinkage)' - 'hyperDiskIopsUpdateDiffSupress' - - 'tpgresource.SetLabelsDiff' examples: - name: 'region_disk_basic' primary_resource_id: 'regiondisk' diff --git a/mmv1/products/compute/go_RegionSslCertificate.yaml b/mmv1/products/compute/go_RegionSslCertificate.yaml index 21f77cac5d19..a365f1cacf13 100644 --- a/mmv1/products/compute/go_RegionSslCertificate.yaml +++ b/mmv1/products/compute/go_RegionSslCertificate.yaml @@ -24,9 +24,9 @@ references: 'Official Documentation': 'https://cloud.google.com/load-balancing/docs/ssl-certificates' api: 'https://cloud.google.com/compute/docs/reference/rest/v1/regionSslCertificates' docs: - optional_properties: '* `name_prefix` - (Optional) Creates a unique name beginning with the - specified prefix. Conflicts with `name`. -' + optional_properties: | + * `name_prefix` - (Optional) Creates a unique name beginning with the + specified prefix. Conflicts with `name`. base_url: 'projects/{{project}}/regions/{{region}}/sslCertificates' has_self_link: true immutable: true @@ -119,7 +119,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. 
default_from_api: true custom_expand: 'templates/terraform/custom_expand/go/name_or_name_prefix.go.tmpl' diff --git a/mmv1/products/compute/go_Reservation.yaml b/mmv1/products/compute/go_Reservation.yaml index 397f513e5ade..518522620377 100644 --- a/mmv1/products/compute/go_Reservation.yaml +++ b/mmv1/products/compute/go_Reservation.yaml @@ -132,6 +132,7 @@ properties: type: Enum description: | Type of sharing for this shared-reservation + immutable: true default_from_api: true enum_values: - 'LOCAL' diff --git a/mmv1/products/compute/go_Route.yaml b/mmv1/products/compute/go_Route.yaml index f3f3ecfb2435..ef5b1a44c013 100644 --- a/mmv1/products/compute/go_Route.yaml +++ b/mmv1/products/compute/go_Route.yaml @@ -44,11 +44,11 @@ references: 'Using Routes': 'https://cloud.google.com/vpc/docs/using-routes' api: 'https://cloud.google.com/compute/docs/reference/rest/v1/routes' docs: - optional_properties: '* `next_hop_instance_zone` - (Optional when `next_hop_instance` is - specified) The zone of the instance specified in - `next_hop_instance`. Omit if `next_hop_instance` is specified as - a URL. -' + optional_properties: | + * `next_hop_instance_zone` - (Optional when `next_hop_instance` is + specified) The zone of the instance specified in + `next_hop_instance`. Omit if `next_hop_instance` is specified as + a URL. base_url: 'projects/{{project}}/global/routes' has_self_link: true immutable: true diff --git a/mmv1/products/compute/go_RouterNat.yaml b/mmv1/products/compute/go_RouterNat.yaml index 4ba216388bec..8e6afb9e5415 100644 --- a/mmv1/products/compute/go_RouterNat.yaml +++ b/mmv1/products/compute/go_RouterNat.yaml @@ -150,7 +150,7 @@ properties: is set to MANUAL_ONLY. 
is_set: true send_empty_value: true - set_hash_func: 'computeRouterNatIPsHash' + set_hash_func: computeRouterNatIPsHash custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: name: 'address' @@ -198,7 +198,7 @@ properties: api_name: subnetworks is_set: true send_empty_value: true - set_hash_func: 'computeRouterNatSubnetworkHash' + set_hash_func: computeRouterNatSubnetworkHash item_type: type: NestedObject properties: @@ -323,7 +323,7 @@ properties: description: 'A list of rules associated with this NAT.' is_set: true send_empty_value: true - set_hash_func: 'computeRouterNatRulesHash' + set_hash_func: computeRouterNatRulesHash item_type: type: NestedObject properties: @@ -368,7 +368,7 @@ properties: These IP addresses must be valid static external IP addresses assigned to the project. This field is used for public NAT. is_set: true - set_hash_func: 'computeRouterNatIPsHash' + set_hash_func: computeRouterNatIPsHash custom_flatten: 'templates/terraform/custom_flatten/go/nat_rules_ip_set.tmpl' custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: @@ -386,7 +386,7 @@ properties: These IPs should be used for updating/patching a NAT rule only. This field is used for public NAT. is_set: true - set_hash_func: 'computeRouterNatIPsHash' + set_hash_func: computeRouterNatIPsHash custom_flatten: 'templates/terraform/custom_flatten/go/nat_rules_ip_set.tmpl' custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: @@ -404,7 +404,7 @@ properties: This field is used for private NAT. 
is_set: true min_version: 'beta' - set_hash_func: 'computeRouterNatRulesSubnetHash' + set_hash_func: computeRouterNatRulesSubnetHash custom_flatten: 'templates/terraform/custom_flatten/go/nat_rules_subnets_set.tmpl' custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: @@ -422,7 +422,7 @@ properties: This field is used for private NAT. is_set: true min_version: 'beta' - set_hash_func: 'computeRouterNatRulesSubnetHash' + set_hash_func: computeRouterNatRulesSubnetHash custom_flatten: 'templates/terraform/custom_flatten/go/nat_rules_subnets_set.tmpl' custom_expand: 'templates/terraform/custom_expand/go/array_resourceref_with_validation.go.tmpl' item_type: diff --git a/mmv1/products/compute/go_ServiceAttachment.yaml b/mmv1/products/compute/go_ServiceAttachment.yaml index 39e46277bc81..6c7d3fc345de 100644 --- a/mmv1/products/compute/go_ServiceAttachment.yaml +++ b/mmv1/products/compute/go_ServiceAttachment.yaml @@ -211,7 +211,7 @@ properties: attachment. is_set: true send_empty_value: true - set_hash_func: 'computeServiceAttachmentConsumerAcceptListsHash' + set_hash_func: computeServiceAttachmentConsumerAcceptListsHash item_type: type: NestedObject properties: diff --git a/mmv1/products/compute/go_SslCertificate.yaml b/mmv1/products/compute/go_SslCertificate.yaml index 40808e930866..97bc3263f0a8 100644 --- a/mmv1/products/compute/go_SslCertificate.yaml +++ b/mmv1/products/compute/go_SslCertificate.yaml @@ -24,9 +24,9 @@ references: 'Official Documentation': 'https://cloud.google.com/load-balancing/docs/ssl-certificates' api: 'https://cloud.google.com/compute/docs/reference/rest/v1/sslCertificates' docs: - optional_properties: '* `name_prefix` - (Optional) Creates a unique name beginning with the - specified prefix. Conflicts with `name`. -' + optional_properties: | + * `name_prefix` - (Optional) Creates a unique name beginning with the + specified prefix. Conflicts with `name`. 
base_url: 'projects/{{project}}/global/sslCertificates' has_self_link: true immutable: true @@ -107,7 +107,6 @@ properties: characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. - These are in the same namespace as the managed SSL certificates. default_from_api: true custom_expand: 'templates/terraform/custom_expand/go/name_or_name_prefix.go.tmpl' diff --git a/mmv1/products/compute/go_TargetHttpsProxy.yaml b/mmv1/products/compute/go_TargetHttpsProxy.yaml index d9ec4cdc7d50..0b758b752c8c 100644 --- a/mmv1/products/compute/go_TargetHttpsProxy.yaml +++ b/mmv1/products/compute/go_TargetHttpsProxy.yaml @@ -125,6 +125,18 @@ properties: - 'NONE' - 'ENABLE' - 'DISABLE' + - name: 'tlsEarlyData' + type: Enum + description: | + Specifies whether TLS 1.3 0-RTT Data (“Early Data”) should be accepted for this service. + Early Data allows a TLS resumption handshake to include the initial application payload + (a HTTP request) alongside the handshake, reducing the effective round trips to “zero”. + This applies to TLS 1.3 connections over TCP (HTTP/2) as well as over UDP (QUIC/h3). 
+ default_from_api: true + enum_values: + - 'STRICT' + - 'PERMISSIVE' + - 'DISABLED' - name: 'certificateManagerCertificates' type: Array description: | diff --git a/mmv1/products/compute/go_UrlMap.yaml b/mmv1/products/compute/go_UrlMap.yaml index 38680546b572..dee0cd5963de 100644 --- a/mmv1/products/compute/go_UrlMap.yaml +++ b/mmv1/products/compute/go_UrlMap.yaml @@ -118,6 +118,15 @@ examples: http_health_check_name: 'health-check' backend_bucket_name: 'static-asset-backend-bucket' storage_bucket_name: 'static-asset-bucket' + - name: 'url_map_custom_error_response_policy' + primary_resource_id: 'urlmap' + min_version: 'beta' + vars: + url_map_name: 'urlmap' + backend_service_name: 'login' + http_health_check_name: 'health-check' + storage_bucket_name: 'static-asset-bucket' + error_backend_bucket_name: 'error-backend-bucket' parameters: properties: - name: 'creationTimestamp' @@ -301,6 +310,63 @@ properties: description: | An optional description of this resource. Provide this property when you create the resource. + - name: 'defaultCustomErrorResponsePolicy' + type: NestedObject + description: | + defaultCustomErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendServiceor BackendBucket responds with an error. + + This policy takes effect at the PathMatcher level and applies only when no policy has been defined for the error code at lower levels like RouteRule and PathRule within this PathMatcher. If an error code does not have a policy defined in defaultCustomErrorResponsePolicy, then a policy defined for the error code in UrlMap.defaultCustomErrorResponsePolicy takes effect. + + For example, consider a UrlMap with the following configuration: + + UrlMap.defaultCustomErrorResponsePolicy is configured with policies for 5xx and 4xx errors + A RouteRule for /coming_soon/ is configured for the error code 404. 
+ If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in RouteRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. + + When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. + + defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. + min_version: 'beta' + properties: + - name: 'errorResponseRule' + type: Array + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. + api_name: errorResponseRules + item_type: + type: NestedObject + properties: + - name: 'matchResponseCodes' + type: Array + description: | + Valid values include: + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. 
+ - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: + type: String + - name: 'path' + type: String + description: | + The full path to a file within backendBucket . For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters + - name: 'overrideResponseCode' + type: Integer + description: | + The HTTP status code returned with the response containing the custom error content. + If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - name: 'errorService' + type: ResourceRef + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. Examples are: + resource: 'BackendBucket' + imports: 'selfLink' - name: 'headerAction' type: NestedObject description: | @@ -406,6 +472,60 @@ properties: required: true item_type: type: String + - name: 'customErrorResponsePolicy' + type: NestedObject + description: | + customErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendServiceor BackendBucket responds with an error. + If a policy for an error code is not configured for the PathRule, a policy for the error code configured in pathMatcher.defaultCustomErrorResponsePolicy is applied. 
If one is not specified in pathMatcher.defaultCustomErrorResponsePolicy, the policy configured in UrlMap.defaultCustomErrorResponsePolicy takes effect. + For example, consider a UrlMap with the following configuration: + UrlMap.defaultCustomErrorResponsePolicy are configured with policies for 5xx and 4xx errors + A PathRule for /coming_soon/ is configured for the error code 404. + If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in PathRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. + customErrorResponsePolicy is supported only for global external Application Load Balancers. + min_version: 'beta' + properties: + - name: 'errorResponseRule' + type: Array + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. However if the backend service returns a 403, the rule for 4xx takes effect. + api_name: errorResponseRules + item_type: + type: NestedObject + properties: + - name: 'matchResponseCodes' + type: Array + description: | + Valid values include: + + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. 
+ - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: + type: String + - name: 'path' + type: String + description: | + The full path to a file within backendBucket . For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters + - name: 'overrideResponseCode' + type: Integer + description: | + The HTTP status code returned with the response containing the custom error content. + If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - name: 'errorService' + type: ResourceRef + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. Examples are: + + resource: 'BackendBucket' + imports: 'selfLink' - name: 'routeAction' type: NestedObject description: | @@ -1910,6 +2030,64 @@ properties: The value must be between 0.0 and 100.0 inclusive. validation: function: 'validation.FloatBetween(0, 100)' + - name: 'defaultCustomErrorResponsePolicy' + type: NestedObject + description: | + defaultCustomErrorResponsePolicy specifies how the Load Balancer returns error responses when BackendServiceor BackendBucket responds with an error. + + This policy takes effect at the PathMatcher level and applies only when no policy has been defined for the error code at lower levels like RouteRule and PathRule within this PathMatcher. 
If an error code does not have a policy defined in defaultCustomErrorResponsePolicy, then a policy defined for the error code in UrlMap.defaultCustomErrorResponsePolicy takes effect. + + For example, consider a UrlMap with the following configuration: + + UrlMap.defaultCustomErrorResponsePolicy is configured with policies for 5xx and 4xx errors + A RouteRule for /coming_soon/ is configured for the error code 404. + If the request is for www.myotherdomain.com and a 404 is encountered, the policy under UrlMap.defaultCustomErrorResponsePolicy takes effect. If a 404 response is encountered for the request www.example.com/current_events/, the pathMatcher's policy takes effect. If however, the request for www.example.com/coming_soon/ encounters a 404, the policy in RouteRule.customErrorResponsePolicy takes effect. If any of the requests in this example encounter a 500 error code, the policy at UrlMap.defaultCustomErrorResponsePolicy takes effect. + + When used in conjunction with pathMatcher.defaultRouteAction.retryPolicy, retries take precedence. Only once all retries are exhausted, the defaultCustomErrorResponsePolicy is applied. While attempting a retry, if load balancer is successful in reaching the service, the defaultCustomErrorResponsePolicy is ignored and the response from the service is returned to the client. + + defaultCustomErrorResponsePolicy is supported only for global external Application Load Balancers. + min_version: 'beta' + properties: + - name: 'errorResponseRule' + type: Array + description: | + Specifies rules for returning error responses. + In a given policy, if you specify rules for both a range of error codes as well as rules for specific error codes then rules with specific error codes have a higher priority. + For example, assume that you configure a rule for 401 (Un-authorized) code, and another for all 4 series error codes (4XX). + If the backend service returns a 401, then the rule for 401 will be applied. 
However if the backend service returns a 403, the rule for 4xx takes effect. + api_name: errorResponseRules + item_type: + type: NestedObject + properties: + - name: 'matchResponseCodes' + type: Array + description: | + Valid values include: + - A number between 400 and 599: For example 401 or 503, in which case the load balancer applies the policy if the error code exactly matches this value. + - 5xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 500 to 599. + - 4xx: Load Balancer will apply the policy if the backend service responds with any response code in the range of 400 to 499. + Values must be unique within matchResponseCodes and across all errorResponseRules of CustomErrorResponsePolicy. + item_type: + type: String + - name: 'path' + type: String + description: | + The full path to a file within backendBucket. For example: /errors/defaultError.html + path must start with a leading slash. path cannot have trailing slashes. + If the file is not available in backendBucket or the load balancer cannot reach the BackendBucket, a simple Not Found Error is returned to the client. + The value must be from 1 to 1024 characters. + - name: 'overrideResponseCode' + type: Integer + description: | + The HTTP status code returned with the response containing the custom error content. + If overrideResponseCode is not supplied, the same response code returned by the original backend bucket or backend service is returned to the client. + - name: 'errorService' + type: ResourceRef + description: | + The full or partial URL to the BackendBucket resource that contains the custom error content. 
Examples are: + + resource: 'BackendBucket' + imports: 'selfLink' - name: 'test' type: Array description: | diff --git a/mmv1/products/compute/go_VpnGateway.yaml b/mmv1/products/compute/go_VpnGateway.yaml index 87e0c5d6dda6..07b33e5b2f78 100644 --- a/mmv1/products/compute/go_VpnGateway.yaml +++ b/mmv1/products/compute/go_VpnGateway.yaml @@ -22,9 +22,9 @@ references: guides: api: 'https://cloud.google.com/compute/docs/reference/rest/v1/targetVpnGateways' docs: - warning: 'Classic VPN is deprecating certain functionality on October 31, 2021. For more information, -see the [Classic VPN partial deprecation page](https://cloud.google.com/network-connectivity/docs/vpn/deprecations/classic-vpn-deprecation). -' + warning: | + Classic VPN is deprecating certain functionality on October 31, 2021. For more information, + see the [Classic VPN partial deprecation page](https://cloud.google.com/network-connectivity/docs/vpn/deprecations/classic-vpn-deprecation). base_url: 'projects/{{project}}/regions/{{region}}/targetVpnGateways' has_self_link: true immutable: true diff --git a/mmv1/products/datafusion/Instance.yaml b/mmv1/products/datafusion/Instance.yaml index ccd0efd1c762..e8e0d553ab73 100644 --- a/mmv1/products/datafusion/Instance.yaml +++ b/mmv1/products/datafusion/Instance.yaml @@ -59,6 +59,18 @@ examples: test_vars_overrides: # Mark for testing to avoid service networking connection usage that is not cleaned up prober_test_run: '`options = { prober_test_run = "true" }`' + - !ruby/object:Provider::Terraform::Examples + name: 'data_fusion_instance_psc' + primary_resource_id: 'psc_instance' + vars: + instance_name: 'psc-instance' + network_name: 'datafusion-psc-network' + subnet_name: 'datafusion-psc-subnet' + attachment_name: 'datafusion-psc-attachment' + prober_test_run: '' + test_vars_overrides: + # Mark for testing to avoid service networking connection usage that is not cleaned up + prober_test_run: '`options = { prober_test_run = "true" }`' - 
!ruby/object:Provider::Terraform::Examples name: 'data_fusion_instance_cmek' primary_resource_id: 'cmek' @@ -237,7 +249,6 @@ properties: description: | The IP range in CIDR notation to use for the managed Data Fusion instance nodes. This range must not overlap with any other ranges used in the Data Fusion instance network. - required: true immutable: true - !ruby/object:Api::Type::String name: 'network' @@ -245,8 +256,46 @@ properties: Name of the network in the project with which the tenant project will be peered for executing pipelines. In case of shared VPC where the network resides in another host project the network should specified in the form of projects/{host-project-id}/global/networks/{network} - required: true immutable: true + - !ruby/object:Api::Type::Enum + name: 'connectionType' + description: | + Optional. Type of connection for establishing private IP connectivity between the Data Fusion customer project VPC and + the corresponding tenant project from a predefined list of available connection modes. + If this field is unspecified for a private instance, VPC peering is used. + values: + - :VPC_PEERING + - :PRIVATE_SERVICE_CONNECT_INTERFACES + immutable: true + - !ruby/object:Api::Type::NestedObject + name: 'privateServiceConnectConfig' + description: | + Optional. Configuration for Private Service Connect. + This is required only when using connection type PRIVATE_SERVICE_CONNECT_INTERFACES. + immutable: true + properties: + - !ruby/object:Api::Type::String + name: 'networkAttachment' + description: | + Optional. The reference to the network attachment used to establish private connectivity. + It will be of the form projects/{project-id}/regions/{region}/networkAttachments/{network-attachment-id}. + This is required only when using connection type PRIVATE_SERVICE_CONNECT_INTERFACES. + immutable: true + - !ruby/object:Api::Type::String + name: 'unreachableCidrBlock' + description: | + Optional. Input only. 
The CIDR block to which the CDF instance can't route traffic to in the consumer project VPC. + The size of this block should be at least /25. This range should not overlap with the primary address range of any subnetwork used by the network attachment. + This range can be used for other purposes in the consumer VPC as long as there is no requirement for CDF to reach destinations using these addresses. + If this value is not provided, the server chooses a non RFC 1918 address range. The format of this field is governed by RFC 4632. + ignore_read: true + immutable: true + - !ruby/object:Api::Type::String + name: 'effectiveUnreachableCidrBlock' + description: | + Output only. The CIDR block to which the CDF instance can't route traffic to in the consumer project VPC. + The size of this block is /25. The format of this field is governed by RFC 4632. + output: true - !ruby/object:Api::Type::String name: 'zone' description: | diff --git a/mmv1/products/datafusion/go_Instance.yaml b/mmv1/products/datafusion/go_Instance.yaml index fd14261d37be..f76d7bc4950f 100644 --- a/mmv1/products/datafusion/go_Instance.yaml +++ b/mmv1/products/datafusion/go_Instance.yaml @@ -305,6 +305,7 @@ properties: If accelerators are enabled it is possible a permadiff will be created with the Options field. Users will need to either manually update their state file to include these diffed options, or include the field in a [lifecycle ignore changes block](https://developer.hashicorp.com/terraform/language/meta-arguments/lifecycle#ignore_changes). 
item_type: + type: NestedObject properties: - name: 'acceleratorType' type: Enum @@ -323,4 +324,3 @@ properties: enum_values: - 'ENABLED' - 'DISABLED' - type: NestedObject diff --git a/mmv1/products/firebasehosting/Site.yaml b/mmv1/products/firebasehosting/Site.yaml index 1fc419d686b1..f20104f3b333 100644 --- a/mmv1/products/firebasehosting/Site.yaml +++ b/mmv1/products/firebasehosting/Site.yaml @@ -24,6 +24,8 @@ references: !ruby/object:Api::Resource::ReferenceLinks guides: 'Official Documentation': 'https://firebase.google.com/docs/hosting' api: 'https://firebase.google.com/docs/reference/hosting/rest/v1beta1/projects.sites' +custom_code: !ruby/object:Provider::Terraform::CustomCode + pre_create: templates/terraform/pre_create/firebasehosting_site.go.erb import_format: ['projects/{{project}}/sites/{{site_id}}', 'sites/{{site_id}}', '{{site_id}}'] examples: diff --git a/mmv1/products/healthcare/Dataset.yaml b/mmv1/products/healthcare/Dataset.yaml index eb5d12034582..7538b95986f0 100644 --- a/mmv1/products/healthcare/Dataset.yaml +++ b/mmv1/products/healthcare/Dataset.yaml @@ -35,6 +35,15 @@ examples: dataset_name: 'example-dataset' location: 'us-central1' time_zone: 'America/New_York' + - !ruby/object:Provider::Terraform::Examples + name: 'healthcare_dataset_cmek' + primary_resource_id: 'default' + vars: + dataset_name: 'example-dataset' + location: 'us-central1' + time_zone: 'America/New_York' + key_name: 'example-key' + keyring_name: 'example-keyring' custom_code: !ruby/object:Provider::Terraform::CustomCode decoder: templates/terraform/decoders/long_name_to_self_link.go.erb parameters: @@ -66,3 +75,18 @@ properties: The fully qualified name of this dataset output: true ignore_read: true + - !ruby/object:Api::Type::NestedObject + name: 'encryptionSpec' + required: false + immutable: true + default_from_api: true + properties: + - !ruby/object:Api::Type::String + name: 'kmsKeyName' + description: | + KMS encryption key that is used to secure this dataset and its 
sub-resources. The key used for + encryption and the dataset must be in the same location. If empty, the default Google encryption + key will be used to secure this dataset. The format is + projects/{projectId}/locations/{locationId}/keyRings/{keyRingId}/cryptoKeys/{keyId}. + required: false + immutable: true diff --git a/mmv1/products/identityplatform/Config.yaml b/mmv1/products/identityplatform/Config.yaml index 97bfb4f98354..1a8ed31bceac 100644 --- a/mmv1/products/identityplatform/Config.yaml +++ b/mmv1/products/identityplatform/Config.yaml @@ -233,6 +233,7 @@ properties: name: 'smsRegionConfig' description: | Configures the regions where users are allowed to send verification SMS for the project or tenant. This is based on the calling code of the destination phone number. + default_from_api: true properties: - !ruby/object:Api::Type::NestedObject name: 'allowByDefault' diff --git a/mmv1/products/pubsub/go_Subscription.yaml b/mmv1/products/pubsub/go_Subscription.yaml index 3ee6f3eb70fa..02ab2a488f57 100644 --- a/mmv1/products/pubsub/go_Subscription.yaml +++ b/mmv1/products/pubsub/go_Subscription.yaml @@ -75,6 +75,14 @@ examples: subscription_name: 'example-subscription' dataset_id: 'example_dataset' table_id: 'example_table' + - name: 'pubsub_subscription_push_bq_service_account' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + dataset_id: 'example_dataset' + table_id: 'example_table' + service_account_id: 'example-bqw' - name: 'pubsub_subscription_push_cloudstorage' primary_resource_id: 'example' vars: @@ -87,6 +95,13 @@ examples: topic_name: 'example-topic' subscription_name: 'example-subscription' bucket_name: 'example-bucket' + - name: 'pubsub_subscription_push_cloudstorage_service_account' + primary_resource_id: 'example' + vars: + topic_name: 'example-topic' + subscription_name: 'example-subscription' + bucket_name: 'example-bucket' + service_account_id: 'example-stw' parameters: 
properties: - name: 'name' @@ -133,17 +148,15 @@ properties: description: | When true, use the topic's schema as the columns to write to in BigQuery, if it exists. Only one of use_topic_schema and use_table_schema can be set. - # Not present in Ruby version - # conflicts: - # - use_table_schema + conflicts: + - use_table_schema - name: 'useTableSchema' type: Boolean description: | When true, use the BigQuery table's schema as the columns to write to in BigQuery. Messages must be published in JSON format. Only one of use_topic_schema and use_table_schema can be set. - # Not present in Ruby version - # conflicts: - # - use_topic_schema + conflicts: + - use_topic_schema - name: 'writeMetadata' type: Boolean description: | @@ -155,6 +168,12 @@ properties: When true and use_topic_schema or use_table_schema is true, any fields that are a part of the topic schema or message schema that are not part of the BigQuery table schema are dropped when writing to BigQuery. Otherwise, the schemas must be kept in sync and any messages with extra fields are not written and remain in the subscription's backlog. + - name: 'serviceAccountEmail' + type: String + description: | + The service account to use to write to BigQuery. If not specified, the Pub/Sub + [service agent](https://cloud.google.com/iam/docs/service-agents), + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, is used. - name: 'cloudStorageConfig' type: NestedObject description: | @@ -212,6 +231,12 @@ properties: type: Boolean description: | When true, write the subscription name, messageId, publishTime, attributes, and orderingKey as additional fields in the output. + - name: 'serviceAccountEmail' + type: String + description: | + The service account to use to write to Cloud Storage. If not specified, the Pub/Sub + [service agent](https://cloud.google.com/iam/docs/service-agents), + service-{project_number}@gcp-sa-pubsub.iam.gserviceaccount.com, is used. 
- name: 'pushConfig' type: NestedObject description: | diff --git a/mmv1/products/vertexai/FeatureGroup.yaml b/mmv1/products/vertexai/FeatureGroup.yaml index 22ca5e4df9d9..635a97f554db 100644 --- a/mmv1/products/vertexai/FeatureGroup.yaml +++ b/mmv1/products/vertexai/FeatureGroup.yaml @@ -90,6 +90,8 @@ properties: - !ruby/object:Api::Type::NestedObject name: bigQuery description: Indicates that features for this group come from BigQuery Table/View. By default treats the source as a sparse time series source, which is required to have an entityId and a feature_timestamp column in the source. + update_mask_fields: + - 'bigQuery.entityIdColumns' properties: - !ruby/object:Api::Type::NestedObject name: bigQuerySource @@ -103,5 +105,5 @@ properties: description: 'BigQuery URI to a table, up to 2000 characters long. For example: `bq://projectId.bqDatasetId.bqTableId.`' - !ruby/object:Api::Type::Array name: entityIdColumns - description: Columns to construct entityId / row keys. Currently only supports 1 entity_id_column. If not provided defaults to entityId. + description: Columns to construct entityId / row keys. If not provided defaults to entityId. 
item_type: Api::Type::String diff --git a/mmv1/products/vertexai/FeatureOnlineStore.yaml b/mmv1/products/vertexai/FeatureOnlineStore.yaml index 702626b7a1ad..663299718ce9 100644 --- a/mmv1/products/vertexai/FeatureOnlineStore.yaml +++ b/mmv1/products/vertexai/FeatureOnlineStore.yaml @@ -50,13 +50,12 @@ examples: ignore_read_extra: - force_destroy - !ruby/object:Provider::Terraform::Examples - name: vertex_ai_featureonlinestore_with_beta_fields_optimized + name: vertex_ai_featureonlinestore_with_optimized primary_resource_id: featureonlinestore vars: name: example_feature_online_store_optimized ignore_read_extra: - force_destroy - min_version: beta - !ruby/object:Provider::Terraform::Examples name: vertex_ai_featureonlinestore_with_beta_fields_bigtable primary_resource_id: featureonlinestore @@ -154,9 +153,8 @@ properties: - !ruby/object:Api::Type::NestedObject name: dedicatedServingEndpoint description: | - The dedicated serving endpoint for this FeatureOnlineStore, which is different from common vertex service endpoint. Only need to set when you choose Optimized storage type or enable EmbeddingManagement. Will use public endpoint by default. + The dedicated serving endpoint for this FeatureOnlineStore, which is different from common vertex service endpoint. Only need to be set when you choose Optimized storage type or enable EmbeddingManagement. Will use public endpoint by default. 
default_from_api: true - min_version: beta properties: - !ruby/object:Api::Type::String name: publicEndpointDomainName diff --git a/mmv1/products/vmwareengine/Network.yaml b/mmv1/products/vmwareengine/Network.yaml index bc74de06cb59..196ef5e6cd2b 100644 --- a/mmv1/products/vmwareengine/Network.yaml +++ b/mmv1/products/vmwareengine/Network.yaml @@ -43,6 +43,10 @@ async: !ruby/object:Api::OpAsync import_format: ["projects/{{project}}/locations/{{location}}/vmwareEngineNetworks/{{name}}"] autogen_async: true + +# There is a handwritten sweeper that provides a list of locations to sweep +skip_sweeper: true + examples: - !ruby/object:Provider::Terraform::Examples name: "vmware_engine_network_standard" diff --git a/mmv1/provider/terraform.go b/mmv1/provider/terraform.go index 893c98c36587..ff16e3614ab8 100644 --- a/mmv1/provider/terraform.go +++ b/mmv1/provider/terraform.go @@ -276,8 +276,8 @@ func (t Terraform) CopyCommonFiles(outputFolder string, generateCode, generateDo t.CopyFileList(outputFolder, files) } -// To compile a new folder, add the folder to foldersCopiedToRootDir or foldersCopiedToGoogleDir. -// To compile a file, add the file to singleFiles +// To copy a new folder, add the folder to foldersCopiedToRootDir or foldersCopiedToGoogleDir. 
+// To copy a file, add the file to singleFiles func (t Terraform) getCommonCopyFiles(versionName string, generateCode, generateDocs bool) map[string]string { // key is the target file and value is the source file commonCopyFiles := make(map[string]string, 0) @@ -319,9 +319,9 @@ func (t Terraform) getCommonCopyFiles(versionName string, generateCode, generate // Case 3: When copy a single file, save the target as key and source as value to the map singleFiles singleFiles := map[string]string{ "go.sum": "third_party/terraform/go.sum", - "go.mod": "third_party/terraform/go.mod", + "go.mod": "third_party/terraform/go/go.mod", ".go-version": "third_party/terraform/.go-version", - "terraform-registry-manifest.json": "third_party/terraform/terraform-registry-manifest.json", + "terraform-registry-manifest.json": "third_party/terraform/go/terraform-registry-manifest.json", } maps.Copy(commonCopyFiles, singleFiles) @@ -332,7 +332,7 @@ func (t Terraform) getCopyFilesInFolder(folderPath, targetDir string) map[string m := make(map[string]string, 0) filepath.WalkDir(folderPath, func(path string, di fs.DirEntry, err error) error { if !di.IsDir() && !strings.HasSuffix(di.Name(), ".tmpl") && !strings.HasSuffix(di.Name(), ".erb") { - fname := strings.TrimPrefix(path, "third_party/terraform/") + fname := strings.TrimPrefix(strings.Replace(path, "/go/", "/", 1), "third_party/terraform/") target := fname if targetDir != "." 
{ target = fmt.Sprintf("%s/%s", targetDir, fname) @@ -389,11 +389,11 @@ func (t Terraform) CopyFileList(outputFolder string, files map[string]string) { // common_compile_file, // override_path = nil // ) -func (t Terraform) CompileCommonFiles(outputFolder string, products []map[string]interface{}, overridePath string) { +func (t Terraform) CompileCommonFiles(outputFolder string, products []*api.Product, overridePath string) { t.generateResourcesForVersion(products) files := t.getCommonCompileFiles(t.TargetVersionName) templateData := NewTemplateData(outputFolder, t.Version) - t.CompileFileList(outputFolder, files, *templateData) + t.CompileFileList(outputFolder, files, *templateData, products) } // To compile a new folder, add the folder to foldersCompiledToRootDir or foldersCompiledToGoogleDir. @@ -424,10 +424,10 @@ func (t Terraform) getCommonCompileFiles(versionName string) map[string]string { // Case 3: When compile a single file, save the target as key and source as value to the map singleFiles singleFiles := map[string]string{ - "main.go": "third_party/terraform/main.go.tmpl", - ".goreleaser.yml": "third_party/terraform/.goreleaser.yml.tmpl", - ".release/release-metadata.hcl": "third_party/terraform/release-metadata.hcl.tmpl", - ".copywrite.hcl": "third_party/terraform/.copywrite.hcl.tmpl", + "main.go": "third_party/terraform/go/main.go.tmpl", + ".goreleaser.yml": "third_party/terraform/go/.goreleaser.yml.tmpl", + ".release/release-metadata.hcl": "third_party/terraform/go/release-metadata.hcl.tmpl", + ".copywrite.hcl": "third_party/terraform/go/.copywrite.hcl.tmpl", } maps.Copy(commonCompileFiles, singleFiles) @@ -438,10 +438,10 @@ func (t Terraform) getCompileFilesInFolder(folderPath, targetDir string) map[str m := make(map[string]string, 0) filepath.WalkDir(folderPath, func(path string, di fs.DirEntry, err error) error { if !di.IsDir() && strings.HasSuffix(di.Name(), ".tmpl") { - fname := strings.TrimPrefix(path, "third_party/terraform/") + fname := 
strings.TrimPrefix(strings.Replace(path, "/go/", "/", 1), "third_party/terraform/") fname = strings.TrimSuffix(fname, ".tmpl") target := fname - if targetDir != "" { + if targetDir != "." { target = fmt.Sprintf("%s/%s", targetDir, fname) } m[target] = path @@ -453,17 +453,16 @@ func (t Terraform) getCompileFilesInFolder(folderPath, targetDir string) map[str } // def compile_file_list(output_folder, files, file_template, pwd = Dir.pwd) -func (t Terraform) CompileFileList(outputFolder string, files map[string]string, fileTemplate TemplateData) { +func (t Terraform) CompileFileList(outputFolder string, files map[string]string, fileTemplate TemplateData, products []*api.Product) { + providerWithProducts := ProviderWithProducts{ + Terraform: t, + Products: products, + } + if err := os.MkdirAll(outputFolder, os.ModePerm); err != nil { log.Println(fmt.Errorf("error creating output directory %v: %v", outputFolder, err)) } - // TODO: is this needed? - // err := os.Chdir(outputFolder) - // if err != nil { - // log.Fatalf("Could not move into the directory %s", outputFolder) - // } - for target, source := range files { targetFile := filepath.Join(outputFolder, target) targetDir := filepath.Dir(targetFile) @@ -477,12 +476,10 @@ func (t Terraform) CompileFileList(outputFolder string, files map[string]string, formatFile := filepath.Ext(targetFile) == ".go" - fileTemplate.GenerateFile(targetFile, source, t, formatFile, templates...) + fileTemplate.GenerateFile(targetFile, source, providerWithProducts, formatFile, templates...) t.replaceImportPath(outputFolder, target) t.addHashicorpCopyRightHeader(outputFolder, target) } - // TODO: is this needed? 
- // Dir.chdir pwd } // def add_hashicorp_copyright_header(output_folder, target) @@ -635,34 +632,48 @@ func (t Terraform) ImportPathFromVersion(v string) string { return fmt.Sprintf("%s/%s", tpg, dir) } -// # Gets the list of services dependent on the version ga, beta, and private -// # If there are some resources of a servcie is in GA, -// # then this service is in GA. Otherwise, the service is in BETA +func (t Terraform) ProviderFromVersion() string { + var dir string + switch t.TargetVersionName { + case "ga": + dir = RESOURCE_DIRECTORY_GA + case "beta": + dir = RESOURCE_DIRECTORY_BETA + default: + dir = RESOURCE_DIRECTORY_PRIVATE + } + return dir +} + +// Gets the list of services dependent on the version ga, beta, and private +// If there are some resources of a servcie is in GA, +// then this service is in GA. Otherwise, the service is in BETA // def get_mmv1_services_in_version(products, version) -// -// services = [] -// products.map do |product| -// product_definition = product[:definitions] -// if version == 'ga' -// some_resource_in_ga = false -// product_definition.objects.each do |object| -// break if some_resource_in_ga -// -// if !object.exclude && -// !object.not_in_version?(product_definition.version_obj_or_closest(version)) -// some_resource_in_ga = true -// end -// end -// -// services << product[:definitions].name.downcase if some_resource_in_ga -// else -// services << product[:definitions].name.downcase -// end -// end -// services -// -// end -// +func (t Terraform) GetMmv1ServicesInVersion(products []*api.Product) []string { + var services []string + for _, product := range products { + if t.TargetVersionName == "ga" { + someResourceInGA := false + for _, object := range product.Objects { + if someResourceInGA { + break + } + + if !object.Exclude && !object.NotInVersion(product.VersionObjOrClosest(t.TargetVersionName)) { + someResourceInGA = true + } + } + + if someResourceInGA { + services = append(services, strings.ToLower(product.Name)) 
+ } + } else { + services = append(services, strings.ToLower(product.Name)) + } + } + return services +} + // def generate_newyaml(pwd, data) // // # @api.api_name is the service folder name @@ -899,10 +910,8 @@ func (t Terraform) ImportPathFromVersion(v string) string { // # The variable resources_for_version is used to generate resources in file // # mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb // def generate_resources_for_version(products, version) -func (t *Terraform) generateResourcesForVersion(products []map[string]interface{}) { - // products.each do |product| - for _, product := range products { - productDefinition := product["Definitions"].(*api.Product) +func (t *Terraform) generateResourcesForVersion(products []*api.Product) { + for _, productDefinition := range products { service := strings.ToLower(productDefinition.Name) for _, object := range productDefinition.Objects { if object.Exclude || object.NotInVersion(productDefinition.VersionObjOrClosest(t.TargetVersionName)) { @@ -1009,3 +1018,23 @@ func (t Terraform) DCLVersion() string { return "" } } + +// Gets the provider versions supported by a version +func (t Terraform) SupportedProviderVersions() []string { + var supported []string + for i, v := range product.ORDER { + if i == 0 { + continue + } + supported = append(supported, v) + if v == t.TargetVersionName { + break + } + } + return supported +} + +type ProviderWithProducts struct { + Terraform + Products []*api.Product +} diff --git a/mmv1/provider/terraform/common~copy.yaml b/mmv1/provider/terraform/common~copy.yaml index 1565f5b67fea..b9ad0c850979 100644 --- a/mmv1/provider/terraform/common~copy.yaml +++ b/mmv1/provider/terraform/common~copy.yaml @@ -46,7 +46,14 @@ <% end -%> <% - Dir["third_party/terraform/provider/**/*.go"].each do |file_path| + Dir["third_party/terraform/provider/*.go"].each do |file_path| + fname = file_path.delete_prefix("third_party/terraform/provider") +-%> +'<%= dir -%>/provider/<%= fname -%>': 
'third_party/terraform/provider/<%= fname -%>' +<% end -%> + +<% + Dir["third_party/terraform/provider/universe/*.go"].each do |file_path| fname = file_path.delete_prefix("third_party/terraform/provider") -%> '<%= dir -%>/provider/<%= fname -%>': 'third_party/terraform/provider/<%= fname -%>' @@ -130,12 +137,21 @@ <% end -%> <% - Dir["third_party/terraform/scripts/**/*.*"].each do |file_path| + Dir["third_party/terraform/scripts/*.*"].each do |file_path| + next if file_path.end_with?('.erb') + fname = file_path.delete_prefix('third_party/terraform/') +-%> +'<%= fname -%>': '<%= file_path -%>' +<% end -%> + +<% + Dir["third_party/terraform/scripts/affectedtests/*.*"].each do |file_path| next if file_path.end_with?('.erb') fname = file_path.delete_prefix('third_party/terraform/') -%> '<%= fname -%>': '<%= file_path -%>' <% end -%> + '<%= dir -%>/test-fixtures/': 'third_party/terraform/test-fixtures' <% end -%> <% if generate_docs -%> diff --git a/mmv1/provider/terraform_tgc.rb b/mmv1/provider/terraform_tgc.rb index 150b1c80a1ce..075888bfb87d 100644 --- a/mmv1/provider/terraform_tgc.rb +++ b/mmv1/provider/terraform_tgc.rb @@ -332,7 +332,11 @@ def copy_common_files(output_folder, generate_code, _generate_docs) ['converters/google/resources/logging_folder_bucket_config.go', 'third_party/tgc/logging_folder_bucket_config.go'], ['converters/google/resources/logging_organization_bucket_config.go', - 'third_party/tgc/logging_organization_bucket_config.go'] + 'third_party/tgc/logging_organization_bucket_config.go'], + ['converters/google/resources/logging_project_bucket_config.go', + 'third_party/tgc/logging_project_bucket_config.go'], + ['converters/google/resources/logging_billing_account_bucket_config.go', + 'third_party/tgc/logging_billing_account_bucket_config.go'] ]) end diff --git a/mmv1/template-converter.go b/mmv1/template-converter.go index cab87cd86b63..16d06276cc19 100644 --- a/mmv1/template-converter.go +++ b/mmv1/template-converter.go @@ -11,6 +11,7 @@ import ( 
"strings" "github.com/golang/glog" + "golang.org/x/exp/slices" ) func find(root, ext string) []string { @@ -73,7 +74,8 @@ func convertTemplate(folder string) int { } func convertAllHandwrittenFiles() int { - folders := []string{} + // Add third_party/terraform to convert files in this folder + folders := []string{"third_party/terraform"} // Get all of the service folders servicesRoot := "third_party/terraform/services" @@ -86,11 +88,26 @@ func convertAllHandwrittenFiles() int { folders = append(folders, rubyDir) } + // Get all of the utility folders + utilsExceptionFolders := []string{".teamcity", "website", "META.d", "go", "services", "test-fixtures", "versionq"} + utilsRoot := "third_party/terraform" + utilsFolders, err := ioutil.ReadDir(utilsRoot) + if err != nil { + log.Fatal(err) + } + for _, utilsFolder := range utilsFolders { + if !utilsFolder.IsDir() || slices.Contains(utilsExceptionFolders, utilsFolder.Name()) { + continue + } + rubyDir := fmt.Sprintf("%s/%s", "third_party/terraform", utilsFolder.Name()) + folders = append(folders, rubyDir) + } + counts := 0 for _, folder := range folders { counts += convertHandwrittenFiles(folder) } - log.Printf("%d service handwritten files in total", counts) + log.Printf("%d handwritten files in total", counts) return counts } @@ -107,7 +124,9 @@ func convertHandwrittenFiles(folder string) int { for _, file := range files { filePath := path.Join(folder, file) - + if checkExceptionList(filePath) { + continue + } data, err := os.ReadFile(filePath) if err != nil { log.Fatalf("Cannot open the file: %v", file) @@ -193,6 +212,13 @@ func replace(data []byte) []byte { } data = r.ReplaceAll(data, []byte(`{{- if eq $.TargetVersionName "ga" }}`)) + // Replace <%= "-" + version unless version == 'ga' -%> + r, err = regexp.Compile(`<%= "-" \+ version unless version == 'ga'[\s-]*%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- if ne $.TargetVersionName 
"ga" -}}-{{$.TargetVersionName}}{{- end }}`)) + // Replace \n\n<% unless version.nil? || version == ['|"]ga['|"] -%> r, err = regexp.Compile(`\n\n(\s*)<% unless version\.nil\? \|\| version == ['|"]ga['|"] -%>`) if err != nil { @@ -207,6 +233,167 @@ func replace(data []byte) []byte { } data = r.ReplaceAll(data, []byte(`{{- if or (ne $.TargetVersionName "") (eq $.TargetVersionName "ga") }}`)) + // Replace <% if version.nil? || version == ['|"]ga['|"] -%> + r, err = regexp.Compile(`<% if version\.nil\? \|\| version == ['|"]ga['|"] -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- if or (eq $.TargetVersionName "") (eq $.TargetVersionName "ga") }}`)) + + // Replace <% Api::Product::Version::ORDER[1..Api::Product::Version::ORDER.index(version)].each do |aliased_version| -%> + r, err = regexp.Compile(`<% Api::Product::Version::ORDER\[1\.\.Api::Product::Version::ORDER\.index\(version\)\]\.each do \|aliased_version\| -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ range $$aliasedVersion := $.SupportedProviderVersions -}}`)) + + // Replace <%= provider_name -?%> + r, err = regexp.Compile(`<%= provider_name -?%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $.ProviderFromVersion }}`)) + + // Replace <% products.each do |product| -%> + r, err = regexp.Compile(`<% products\.each do \|product\| -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- range $$product := $.Products }}`)) + + // Replace <% products.map.each do |product| -%> + r, err = regexp.Compile(`<% products\.map\.each do \|product\| -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- range $$product := $.Products }}`)) + 
+ // Replace <% resources_for_version.each do |object| -%> + r, err = regexp.Compile(`<% resources_for_version\.each do \|object\| -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- range $$object := $.ResourcesForVersion }}`)) + + // Replace <% unless object[:resource_name].nil? -%> + r, err = regexp.Compile(`<% unless object\[\:resource_name\]\.nil\? -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- if $$object.ResourceName }}`)) + + // Replace <% unless object[:iam_class_name].nil? -%> + r, err = regexp.Compile(`<% unless object\[\:iam_class_name\]\.nil\? -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- if $$object.IamClassName }}`)) + + // Replace <%= object[:terraform_name] -%> + r, err = regexp.Compile(`<%= object\[\:terraform_name\] -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$object.TerraformName }}`)) + + // Replace <%= object[:resource_name] -%> + r, err = regexp.Compile(`<%= object\[\:resource_name\] -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$object.ResourceName }}`)) + + // Replace <%= object[:iam_class_name] -%> + r, err = regexp.Compile(`<%= object\[\:iam_class_name\] -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$object.IamClassName }}`)) + + // Replace <%= product[:definitions].name -%> + r, err = regexp.Compile(`<%= product\[\:definitions\]\.name -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$product.Name }}`)) + + // Replace <%= product[:definitions].name.underscore -%> 
+ r, err = regexp.Compile(`<%= product\[\:definitions\]\.name\.underscore -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ underscore $$product.Name }}`)) + + // Replace <%= product[:definitions].name.underscore.upcase -%> + r, err = regexp.Compile(`<%= product\[\:definitions\]\.name\.underscore\.upcase -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ upper (underscore $$product.Name) }}`)) + + // Replace <%= product[:definitions].name.base_url -%> + r, err = regexp.Compile(`<%= product\[\:definitions\]\.base_url -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$product.BaseUrl }}`)) + + // Replace <%= product[:definitions].name.underscore.downcase -%> + r, err = regexp.Compile(`<%= product\[\:definitions\]\.name\.underscore\.downcase -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ lower (underscore $$product.Name) }}`)) + + // Replace <%= product[:definitions].name.downcase -%> + r, err = regexp.Compile(`<%= product\[\:definitions\]\.name\.downcase -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ lower $$product.Name }}`)) + + // Replace <% get_mmv1_services_in_version(products, version).each do |service| -%> + r, err = regexp.Compile(`<% get_mmv1_services_in_version\(products, version\)\.each do \|service\| -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{- range $$service := $.GetMmv1ServicesInVersion $.Products }}`)) + + // Replace <%= resource_count %> + r, err = regexp.Compile(`<%= resource_count %>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) 
+ } + data = r.ReplaceAll(data, []byte(`{{ $.ResourceCount }}`)) + + // Replace <%= iam_resource_count %> + r, err = regexp.Compile(`<%= iam_resource_count %>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $.IAMResourceCount }}`)) + + // Replace <%= resource_count + iam_resource_count %> + r, err = regexp.Compile(`<%= resource_count \+ iam_resource_count %>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ plus $.ResourceCount $.IAMResourceCount }}`)) + + // Replace <%= service -%> + r, err = regexp.Compile(`<%= service -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$service }}`)) + + // Replace <%= aliased_version -%> + r, err = regexp.Compile(`<%= aliased_version -%>`) + if err != nil { + log.Fatalf("Cannot compile the regular expression: %v", err) + } + data = r.ReplaceAll(data, []byte(`{{ $$aliasedVersion }}`)) + // Replace <%= dcl_version(version) -%> r, err = regexp.Compile(`<%= dcl_version\(version\) -%>`) if err != nil { @@ -282,7 +469,7 @@ func replace(data []byte) []byte { if err != nil { log.Fatalf("Cannot compile the regular expression: %v", err) } - data = r.ReplaceAll(data, []byte(`{{- if eq $.Name "Disk" }}`)) + data = r.ReplaceAll(data, []byte(`{{ if eq $.Name "Disk" -}}`)) // Replace <% elsif object.name == 'RegionDisk' -%> r, err = regexp.Compile(`<% elsif object.name == 'RegionDisk' -%>`) @@ -478,12 +665,12 @@ func replace(data []byte) []byte { } data = r.ReplaceAll(data, []byte(``)) - // Replace <%= "-" + version unless version == 'ga' -%> - r, err = regexp.Compile(`<%= "-" \+ version unless version == 'ga'[\s-]*%>`) + // Replace <% provider_name = version.nil? || version == 'ga' ? 'google' : 'google-' + version -%> + r, err = regexp.Compile(`<% provider_name = version.nil\? \|\| version == 'ga' \? 
'google' : 'google-' \+ version -%>\n`) if err != nil { log.Fatalf("Cannot compile the regular expression: %v", err) } - data = r.ReplaceAll(data, []byte(`{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}`)) + data = r.ReplaceAll(data, []byte(``)) // Replace .erb r, err = regexp.Compile(`\.erb`) @@ -504,6 +691,16 @@ func checkExceptionList(filePath string) bool { "custom_flatten/bigquery_table_ref_query_destinationtable.go", "unordered_list_customize_diff", "default_if_empty", + + // TODO: remove the following files from the exception list after all of the services are migrated to Go + // It will generate diffs when partial services are migrated. + "provider/provider_mmv1_resources.go.erb", + "provider/provider.go.erb", + "fwmodels/provider_model.go.erb", + "fwprovider/framework_provider.go.erb", + "fwtransport/framework_config.go.erb", + "sweeper/gcp_sweeper_test.go.erb", + "transport/config.go.erb", } for _, t := range exceptionPaths { diff --git a/mmv1/templates/terraform/constants/go/datastream_stream.go.tmpl b/mmv1/templates/terraform/constants/go/datastream_stream.go.tmpl index 0bc5c0e76850..a5f35834357a 100644 --- a/mmv1/templates/terraform/constants/go/datastream_stream.go.tmpl +++ b/mmv1/templates/terraform/constants/go/datastream_stream.go.tmpl @@ -76,4 +76,4 @@ func resourceDatastreamStreamDatabaseIdDiffSuppress(_, old, new string, _ *schem return old == new } -{{- end }} +{{ end }} diff --git a/mmv1/templates/terraform/constants/go/router_nat.go.tmpl b/mmv1/templates/terraform/constants/go/router_nat.go.tmpl index c40c3073acb8..f7550929c0ac 100644 --- a/mmv1/templates/terraform/constants/go/router_nat.go.tmpl +++ b/mmv1/templates/terraform/constants/go/router_nat.go.tmpl @@ -96,7 +96,7 @@ func computeRouterNatIPsHash(v interface{}) int { return schema.HashString(tpgresource.GetResourceNameFromSelfLink(val)) } -{{- if ne $.TargetVersionName "ga" }} +{{ if ne $.TargetVersionName `ga` -}} func computeRouterNatRulesSubnetHash(v 
interface{}) int { return computeRouterNatIPsHash(v) } @@ -145,7 +145,7 @@ func computeRouterNatRulesHash(v interface{}) int { } } - {{- if ne $.TargetVersionName "ga" }} + {{ if ne $.TargetVersionName `ga` -}} sourceNatActiveRanges := action["source_nat_active_ranges"] if sourceNatActiveRanges != nil { sourceNatActiveRangesSet := sourceNatActiveRanges.(*schema.Set) diff --git a/mmv1/templates/terraform/custom_delete/go/active_directory_domain_trust.go.tmpl b/mmv1/templates/terraform/custom_delete/go/active_directory_domain_trust.go.tmpl index dc2c6f62d125..2b9dfd7bbce6 100644 --- a/mmv1/templates/terraform/custom_delete/go/active_directory_domain_trust.go.tmpl +++ b/mmv1/templates/terraform/custom_delete/go/active_directory_domain_trust.go.tmpl @@ -8,7 +8,7 @@ return err } - {{- /* The generate DELETE method isn't including the {trust: } object in the response body thus custom_delete is needed */}} + {{/* The generate DELETE method isn't including the {trust: } object in the response body thus custom_delete is needed */ -}} obj := make(map[string]interface{}) targetDomainNameProp, err := expandNestedActiveDirectoryDomainTrustTargetDomainName(d.Get("target_domain_name"), d, config) diff --git a/mmv1/templates/terraform/custom_delete/go/monitoring_uptime_check_config.go.tmpl b/mmv1/templates/terraform/custom_delete/go/monitoring_uptime_check_config.go.tmpl index 190d76b338dd..30dc2bf22c6a 100644 --- a/mmv1/templates/terraform/custom_delete/go/monitoring_uptime_check_config.go.tmpl +++ b/mmv1/templates/terraform/custom_delete/go/monitoring_uptime_check_config.go.tmpl @@ -38,8 +38,8 @@ res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ }) -{{- /* The generated DELETE method returns a generic error for 400. - Need to include a help message about deleting associated Alert Policies. */}} +{{/* The generated DELETE method returns a generic error for 400. + Need to include a help message about deleting associated Alert Policies. 
*/ -}} if err != nil { if transport_tpg.IsGoogleApiErrorWithCode(err, 400) { diff --git a/mmv1/templates/terraform/custom_expand/go/privateca_certificate_509_config.go.tmpl b/mmv1/templates/terraform/custom_expand/go/privateca_certificate_509_config.go.tmpl index c5b7c5b4a30e..deb1179b7cee 100644 --- a/mmv1/templates/terraform/custom_expand/go/privateca_certificate_509_config.go.tmpl +++ b/mmv1/templates/terraform/custom_expand/go/privateca_certificate_509_config.go.tmpl @@ -1,4 +1,4 @@ -{{- /* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} +{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { if v == nil { return v, nil diff --git a/mmv1/templates/terraform/custom_expand/go/reference_to_backend.tmpl b/mmv1/templates/terraform/custom_expand/go/reference_to_backend.tmpl index bcdd41d6f9df..dac33816188e 100644 --- a/mmv1/templates/terraform/custom_expand/go/reference_to_backend.tmpl +++ b/mmv1/templates/terraform/custom_expand/go/reference_to_backend.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* This provides the best long-form self link possible given the input. +{{/* This provides the best long-form self link possible given the input. If the input is a full URL including scheme, we return it unmodified https://compute.googleapis.com/v1/projects/foo/regions/bar/backendBuckets/baz -> (the same) If the input is a partial self-link, we return it with the compute base path in front. @@ -21,7 +21,7 @@ If the input is just project/region/name, region/name, or just name, we treat it like a backendService. 
baz -> https://compute.googleapis.com/v1/projects/provider-project/regions/provider-region/backendServices/baz bar/baz -> https://compute.googleapis.com/v1/projects/provider-project/regions/bar/backendServices/baz - foo/bar/baz -> https://compute.googleapis.com/v1/projects/foo/regions/bar/backendServices/baz */}} + foo/bar/baz -> https://compute.googleapis.com/v1/projects/foo/regions/bar/backendServices/baz */ -}} func expand{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { // This method returns a full self link from whatever the input is. if v == nil || v.(string) == "" { diff --git a/mmv1/templates/terraform/custom_flatten/accesscontextmanager_serviceperimeters_custom_flatten.go.erb b/mmv1/templates/terraform/custom_flatten/accesscontextmanager_serviceperimeters_custom_flatten.go.erb new file mode 100644 index 000000000000..acd645f8d830 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/accesscontextmanager_serviceperimeters_custom_flatten.go.erb @@ -0,0 +1,811 @@ +func flatten<%= prefix -%><%= titlelize_property(property) -%>(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + apiData := make([]map[string]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + continue + } + apiData = append(apiData, map[string]interface{}{ + "name": flattenAccessContextManagerServicePerimetersServicePerimetersName(original["name"], d, config), + "title": flattenAccessContextManagerServicePerimetersServicePerimetersTitle(original["title"], d, config), + "description": flattenAccessContextManagerServicePerimetersServicePerimetersDescription(original["description"], d, config), + "create_time": flattenAccessContextManagerServicePerimetersServicePerimetersCreateTime(original["createTime"], d, config), + "update_time": 
flattenAccessContextManagerServicePerimetersServicePerimetersUpdateTime(original["updateTime"], d, config), + "perimeter_type": flattenAccessContextManagerServicePerimetersServicePerimetersPerimeterType(original["perimeterType"], d, config), + "status": flattenAccessContextManagerServicePerimetersServicePerimetersStatus(original["status"], d, config), + "spec": flattenAccessContextManagerServicePerimetersServicePerimetersSpec(original["spec"], d, config), + "use_explicit_dry_run_spec": flattenAccessContextManagerServicePerimetersServicePerimetersUseExplicitDryRunSpec(original["useExplicitDryRunSpec"], d, config), + }) + } + configData := []map[string]interface{}{} + for _, item := range d.Get("service_perimeters").([]interface{}) { + configData = append(configData, item.(map[string]interface{})) + } + sorted, err := tpgresource.SortMapsByConfigOrder(configData, apiData, "name") + if err != nil { + log.Printf("[ERROR] Could not sort API response value: %s", err) + return v + } + + return sorted +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersTitle(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersUpdateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersPerimeterType(v interface{}, d 
*schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil || tpgresource.IsEmptyValue(reflect.ValueOf(v)) { + return "PERIMETER_TYPE_REGULAR" + } + + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatus(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusResources(original["resources"], d, config) + transformed["access_levels"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusAccessLevels(original["accessLevels"], d, config) + transformed["restricted_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusRestrictedServices(original["restrictedServices"], d, config) + transformed["vpc_accessible_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServices(original["vpcAccessibleServices"], d, config) + transformed["ingress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPolicies(original["ingressPolicies"], d, config) + transformed["egress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPolicies(original["egressPolicies"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusAccessLevels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return 
schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusRestrictedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["enable_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesEnableRestriction(original["enableRestriction"], d, config) + transformed["allowed_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesAllowedServices(original["allowedServices"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesEnableRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesAllowedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := schema.NewSet(schema.HashResource(accesscontextmanagerServicePerimetersServicePerimetersServicePerimetersStatusIngressPoliciesSchema()), []interface{}{}) + for _, raw := 
range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed.Add(map[string]interface{}{ + "ingress_from": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFrom(original["ingressFrom"], d, config), + "ingress_to": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressTo(original["ingressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSources(original["sources"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesAccessLevel(original["accessLevel"], d, config), + "resource": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesResource(original["resource"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesResource(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToResources(original["resources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperations(original["operations"], d, config) + return 
[]interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "egress_from": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFrom(original["egressFrom"], d, config), + "egress_to": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressTo(original["egressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + 
transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSources(original["sources"], d, config) + transformed["source_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourceRestriction(original["sourceRestriction"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourcesAccessLevel(original["accessLevel"], d, config), + }) + } + return transformed +} +func 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourceRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToResources(original["resources"], d, config) + transformed["external_resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToExternalResources(original["externalResources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToExternalResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsPermission(original["permission"], d, config), + 
}) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpec(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecResources(original["resources"], d, config) + transformed["access_levels"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecAccessLevels(original["accessLevels"], d, config) + transformed["restricted_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecRestrictedServices(original["restrictedServices"], d, config) + transformed["vpc_accessible_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServices(original["vpcAccessibleServices"], d, config) + transformed["ingress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPolicies(original["ingressPolicies"], d, config) + transformed["egress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPolicies(original["egressPolicies"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return 
schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecAccessLevels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecRestrictedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["enable_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesEnableRestriction(original["enableRestriction"], d, config) + transformed["allowed_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesAllowedServices(original["allowedServices"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesEnableRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesAllowedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} 
{ + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "ingress_from": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFrom(original["ingressFrom"], d, config), + "ingress_to": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressTo(original["ingressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSources(original["sources"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return 
schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesAccessLevel(original["accessLevel"], d, config), + "resource": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesResource(original["resource"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesResource(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToResources(original["resources"], d, config) + transformed["operations"] = + 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include 
empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "egress_from": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFrom(original["egressFrom"], d, config), + "egress_to": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressTo(original["egressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := 
v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSources(original["sources"], d, config) + transformed["source_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourceRestriction(original["sourceRestriction"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourcesAccessLevel(original["accessLevel"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourceRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToResources(original["resources"], d, config) + transformed["external_resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToExternalResources(original["externalResources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToExternalResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v 
+ } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersUseExplicitDryRunSpec(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} diff --git a/mmv1/templates/terraform/custom_flatten/go/accesscontextmanager_serviceperimeters_custom_flatten.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/accesscontextmanager_serviceperimeters_custom_flatten.go.tmpl new file mode 100644 index 000000000000..231fc3f35c04 --- /dev/null +++ b/mmv1/templates/terraform/custom_flatten/go/accesscontextmanager_serviceperimeters_custom_flatten.go.tmpl @@ -0,0 +1,811 @@ +func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + apiData := make([]map[string]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + continue + } + apiData = append(apiData, map[string]interface{}{ + "name": flattenAccessContextManagerServicePerimetersServicePerimetersName(original["name"], d, config), + "title": flattenAccessContextManagerServicePerimetersServicePerimetersTitle(original["title"], d, config), + "description": flattenAccessContextManagerServicePerimetersServicePerimetersDescription(original["description"], d, 
config), + "create_time": flattenAccessContextManagerServicePerimetersServicePerimetersCreateTime(original["createTime"], d, config), + "update_time": flattenAccessContextManagerServicePerimetersServicePerimetersUpdateTime(original["updateTime"], d, config), + "perimeter_type": flattenAccessContextManagerServicePerimetersServicePerimetersPerimeterType(original["perimeterType"], d, config), + "status": flattenAccessContextManagerServicePerimetersServicePerimetersStatus(original["status"], d, config), + "spec": flattenAccessContextManagerServicePerimetersServicePerimetersSpec(original["spec"], d, config), + "use_explicit_dry_run_spec": flattenAccessContextManagerServicePerimetersServicePerimetersUseExplicitDryRunSpec(original["useExplicitDryRunSpec"], d, config), + }) + } + configData := []map[string]interface{}{} + for _, item := range d.Get("service_perimeters").([]interface{}) { + configData = append(configData, item.(map[string]interface{})) + } + sorted, err := tpgresource.SortMapsByConfigOrder(configData, apiData, "name") + if err != nil { + log.Printf("[ERROR] Could not sort API response value: %s", err) + return v + } + + return sorted +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersTitle(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersDescription(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersCreateTime(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersUpdateTime(v interface{}, d *schema.ResourceData, config 
*transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersPerimeterType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil || tpgresource.IsEmptyValue(reflect.ValueOf(v)) { + return "PERIMETER_TYPE_REGULAR" + } + + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatus(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusResources(original["resources"], d, config) + transformed["access_levels"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusAccessLevels(original["accessLevels"], d, config) + transformed["restricted_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusRestrictedServices(original["restrictedServices"], d, config) + transformed["vpc_accessible_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServices(original["vpcAccessibleServices"], d, config) + transformed["ingress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPolicies(original["ingressPolicies"], d, config) + transformed["egress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPolicies(original["egressPolicies"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusAccessLevels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusRestrictedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["enable_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesEnableRestriction(original["enableRestriction"], d, config) + transformed["allowed_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesAllowedServices(original["allowedServices"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesEnableRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusVpcAccessibleServicesAllowedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := 
v.([]interface{}) + transformed := schema.NewSet(schema.HashResource(accesscontextmanagerServicePerimetersServicePerimetersServicePerimetersStatusIngressPoliciesSchema()), []interface{}{}) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed.Add(map[string]interface{}{ + "ingress_from": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFrom(original["ingressFrom"], d, config), + "ingress_to": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressTo(original["ingressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSources(original["sources"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromIdentities(v interface{}, d *schema.ResourceData, config 
*transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesAccessLevel(original["accessLevel"], d, config), + "resource": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesResource(original["resource"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressFromSourcesResource(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToResources(original["resources"], d, config) + transformed["operations"] = + 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // 
Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusIngressPoliciesIngressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "egress_from": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFrom(original["egressFrom"], d, config), + "egress_to": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressTo(original["egressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + 
return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSources(original["sources"], d, config) + transformed["source_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourceRestriction(original["sourceRestriction"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourcesAccessLevel(original["accessLevel"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressFromSourceRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToResources(original["resources"], d, config) + transformed["external_resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToExternalResources(original["externalResources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToExternalResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v 
== nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": 
flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersStatusEgressPoliciesEgressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpec(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecResources(original["resources"], d, config) + transformed["access_levels"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecAccessLevels(original["accessLevels"], d, config) + transformed["restricted_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecRestrictedServices(original["restrictedServices"], d, config) + transformed["vpc_accessible_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServices(original["vpcAccessibleServices"], d, config) + transformed["ingress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPolicies(original["ingressPolicies"], d, config) + transformed["egress_policies"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPolicies(original["egressPolicies"], d, config) + return []interface{}{transformed} +} +func 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecAccessLevels(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecRestrictedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["enable_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesEnableRestriction(original["enableRestriction"], d, config) + transformed["allowed_services"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesAllowedServices(original["allowedServices"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesEnableRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecVpcAccessibleServicesAllowedServices(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return 
schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "ingress_from": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFrom(original["ingressFrom"], d, config), + "ingress_to": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressTo(original["ingressTo"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSources(original["sources"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesAccessLevel(original["accessLevel"], d, config), + "resource": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesResource(original["resource"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressFromSourcesResource(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToResources(original["resources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := 
v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsMethod(original["method"], d, config), + "permission": flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecIngressPoliciesIngressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPolicies(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "egress_from": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFrom(original["egressFrom"], d, config), + "egress_to": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressTo(original["egressTo"], d, config), + }) + } + return transformed +} +func 
flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFrom(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["identity_type"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentityType(original["identityType"], d, config) + transformed["identities"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentities(original["identities"], d, config) + transformed["sources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSources(original["sources"], d, config) + transformed["source_restriction"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourceRestriction(original["sourceRestriction"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentityType(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromIdentities(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the 
api + continue + } + transformed = append(transformed, map[string]interface{}{ + "access_level": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourcesAccessLevel(original["accessLevel"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourcesAccessLevel(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressFromSourceRestriction(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressTo(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToResources(original["resources"], d, config) + transformed["external_resources"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToExternalResources(original["externalResources"], d, config) + transformed["operations"] = + flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperations(original["operations"], d, config) + return []interface{}{transformed} +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToResources(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToExternalResources(v 
interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + return schema.NewSet(schema.HashString, v.([]interface{})) +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperations(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "service_name": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsServiceName(original["serviceName"], d, config), + "method_selectors": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectors(original["methodSelectors"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsServiceName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectors(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if v == nil { + return v + } + l := v.([]interface{}) + transformed := make([]interface{}, 0, len(l)) + for _, raw := range l { + original := raw.(map[string]interface{}) + if len(original) < 1 { + // Do not include empty json objects coming back from the api + continue + } + transformed = append(transformed, map[string]interface{}{ + "method": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsMethod(original["method"], d, config), + 
"permission": flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsPermission(original["permission"], d, config), + }) + } + return transformed +} +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsMethod(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersSpecEgressPoliciesEgressToOperationsMethodSelectorsPermission(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenAccessContextManagerServicePerimetersServicePerimetersUseExplicitDryRunSpec(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_password.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_password.go.tmpl index 6686da377457..3d39974fbfe2 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_password.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_password.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("forward_ssh_connectivity.0.password") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_private_key.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_private_key.go.tmpl index b9ba7b80e025..e78406acd183 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_private_key.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_forward_ssh_connectivity_private_key.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("forward_ssh_connectivity.0.private_key") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_password.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_password.go.tmpl index a13a80c652ad..6b9fa6fdb940 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_password.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_password.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("mysql_profile.0.password") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_ca_certificate.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_ca_certificate.go.tmpl index 085c8a4017dc..16045abdbd41 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_ca_certificate.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_ca_certificate.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and 
limitations under the License. */ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("mysql_profile.0.ssl_config.0.ca_certificate") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_certificate.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_certificate.go.tmpl index 748ee3af5070..27439273695b 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_certificate.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_certificate.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("mysql_profile.0.ssl_config.0.client_certificate") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_key.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_key.go.tmpl index c672cf97634d..d20130e621a5 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_key.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_mysql_profile_ssl_config_client_key.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("mysql_profile.0.ssl_config.0.client_key") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_oracle_profile_password.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_oracle_profile_password.go.tmpl index 6fc344c88f3b..dfd854d7a631 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_oracle_profile_password.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_oracle_profile_password.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("oracle_profile.0.password") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_postgresql_profile_password.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_postgresql_profile_password.go.tmpl index c68160c71e7a..b4c7ac7f58da 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_postgresql_profile_password.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_postgresql_profile_password.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("postgresql_profile.0.password") } diff --git a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_sql_server_profile_password.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_sql_server_profile_password.go.tmpl index 2c39ddfa4edd..dfb1adb3079a 100644 --- a/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_sql_server_profile_password.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/datastream_connection_profile_sql_server_profile_password.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} +{{/* Workaround for https://github.com/hashicorp/terraform-provider-google/issues/12410 */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { return d.Get("sql_server_profile.0.password") } diff --git a/mmv1/templates/terraform/custom_flatten/go/guard_self_link.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/guard_self_link.go.tmpl index b409691f8a1a..ebdfcd113dd2 100644 --- a/mmv1/templates/terraform/custom_flatten/go/guard_self_link.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/guard_self_link.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* Not all self links behave like ResourceRef expects, eg they may expect a fully qualified url. 
In those +{{/* Not all self links behave like ResourceRef expects, eg they may expect a fully qualified url. In those cases, we need to manually define this flattener. */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { if v == nil { diff --git a/mmv1/templates/terraform/custom_flatten/go/guard_self_link_array.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/guard_self_link_array.go.tmpl index e6772ff622f4..991d78e9eb36 100644 --- a/mmv1/templates/terraform/custom_flatten/go/guard_self_link_array.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/guard_self_link_array.go.tmpl @@ -10,7 +10,7 @@ See the License for the specific language governing permissions and limitations under the License. */ -}} -{{- /* This should be used for multi-resource ref fields that can't be made to real resource refs yet */}} +{{/* This should be used for multi-resource ref fields that can't be made to real resource refs yet */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { if v == nil { return v diff --git a/mmv1/templates/terraform/custom_flatten/go/privateca_certificate_509_config.go.tmpl b/mmv1/templates/terraform/custom_flatten/go/privateca_certificate_509_config.go.tmpl index d5b8bfa2a772..c08addd0f5e3 100644 --- a/mmv1/templates/terraform/custom_flatten/go/privateca_certificate_509_config.go.tmpl +++ b/mmv1/templates/terraform/custom_flatten/go/privateca_certificate_509_config.go.tmpl @@ -1,4 +1,4 @@ -{{- /* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} +{{/* See mmv1/third_party/terraform/utils/privateca_utils.go for the sub-expanders and explanation */}} func flatten{{$.GetPrefix}}{{$.TitlelizeProperty}}(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { if v == nil { v = make(map[string]interface{}) diff --git 
a/mmv1/templates/terraform/decoders/go/region_backend_service.go.tmpl b/mmv1/templates/terraform/decoders/go/region_backend_service.go.tmpl index 024fd0117bbe..560cd1243da7 100644 --- a/mmv1/templates/terraform/decoders/go/region_backend_service.go.tmpl +++ b/mmv1/templates/terraform/decoders/go/region_backend_service.go.tmpl @@ -22,7 +22,7 @@ if ok && m["enabled"] == false { delete(res, "iap") } -{{- if ne $.TargetVersionName "ga" }} +{{ if ne $.TargetVersionName `ga` -}} // Since we add in a NONE subsetting policy, we need to remove it in some // cases for backwards compatibility with the config v, ok = res["subsetting"] diff --git a/mmv1/templates/terraform/encoders/cloudfunctions2_runtime_update_policy.go.erb b/mmv1/templates/terraform/encoders/cloudfunctions2_runtime_update_policy.go.erb new file mode 100644 index 000000000000..db4ef3e273ee --- /dev/null +++ b/mmv1/templates/terraform/encoders/cloudfunctions2_runtime_update_policy.go.erb @@ -0,0 +1,15 @@ +if obj == nil || obj["buildConfig"] == nil { + return obj, nil +} + +build_config := obj["buildConfig"].(map[string]interface{}) + +// Automatic Update policy is the default from API, unset it if the data +// contains the on-deploy policy. +if build_config["onDeployUpdatePolicy"] != nil { + delete(build_config, "automaticUpdatePolicy") +} + +obj["buildConfig"] = build_config + +return obj, nil diff --git a/mmv1/templates/terraform/encoders/go/bigtable_app_profile.go.tmpl b/mmv1/templates/terraform/encoders/go/bigtable_app_profile.go.tmpl index c08c105443be..848352f99c92 100644 --- a/mmv1/templates/terraform/encoders/go/bigtable_app_profile.go.tmpl +++ b/mmv1/templates/terraform/encoders/go/bigtable_app_profile.go.tmpl @@ -10,9 +10,9 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} -{{- /* Because instance is a URL param only, it does not get expanded and +{{/* Because instance is a URL param only, it does not get expanded and the URL is constructed from ResourceData. Set it in - state and use a encoder instead of a field expander */}} + state and use a encoder instead of a field expander */ -}} // Instance is a URL parameter only, so replace self-link/path with resource name only. if err := d.Set("instance", tpgresource.GetResourceNameFromSelfLink(d.Get("instance").(string))); err != nil { return nil, fmt.Errorf("Error setting instance: %s", err) diff --git a/mmv1/templates/terraform/encoders/go/disk.tmpl b/mmv1/templates/terraform/encoders/go/disk.tmpl index d32ecae6a410..51901dcbf237 100644 --- a/mmv1/templates/terraform/encoders/go/disk.tmpl +++ b/mmv1/templates/terraform/encoders/go/disk.tmpl @@ -10,7 +10,7 @@ if err != nil { return nil, err } -{{- if eq $.Name "Disk" }} +{{ if eq $.Name "Disk" -}} if v, ok := d.GetOk("type"); ok { log.Printf("[DEBUG] Loading disk type: %s", v.(string)) diskType, err := readDiskType(config, d, v.(string)) diff --git a/mmv1/templates/terraform/encoders/go/health_check_type.tmpl b/mmv1/templates/terraform/encoders/go/health_check_type.tmpl index e58d074f46a3..e5d7cdae36b6 100644 --- a/mmv1/templates/terraform/encoders/go/health_check_type.tmpl +++ b/mmv1/templates/terraform/encoders/go/health_check_type.tmpl @@ -10,7 +10,6 @@ See the License for the specific language governing permissions and limitations under the License. 
*/ -}} - if _, ok := d.GetOk("http_health_check"); ok { hc := d.Get("http_health_check").([]interface{})[0] ps := hc.(map[string]interface{})["port_specification"] diff --git a/mmv1/templates/terraform/encoders/go/region_backend_service.go.tmpl b/mmv1/templates/terraform/encoders/go/region_backend_service.go.tmpl index dd54de781463..40010b8ac301 100644 --- a/mmv1/templates/terraform/encoders/go/region_backend_service.go.tmpl +++ b/mmv1/templates/terraform/encoders/go/region_backend_service.go.tmpl @@ -32,7 +32,7 @@ if d.Get("load_balancing_scheme").(string) == "EXTERNAL_MANAGED" || d.Get("load_ return obj, nil } -{{- if ne $.TargetVersionName "ga" }} +{{ if ne $.TargetVersionName `ga` -}} // To remove subsetting on an ILB, "NONE" must be specified. If subsetting // isn't specified, we set the value to NONE to make this use case work. _, ok := obj["subsetting"] diff --git a/mmv1/templates/terraform/encoders/go/spanner_database.go.tmpl b/mmv1/templates/terraform/encoders/go/spanner_database.go.tmpl index 0e8ae39b76cf..78cd8aba964d 100644 --- a/mmv1/templates/terraform/encoders/go/spanner_database.go.tmpl +++ b/mmv1/templates/terraform/encoders/go/spanner_database.go.tmpl @@ -10,7 +10,7 @@ log.Printf("[DEBUG] Preparing to create new Database. Any extra DDL statements w delete(obj, "name") delete(obj, "instance") -{{- /* These are added back in post-create, but do not remove for Validator. */}} +{{/* These are added back in post-create, but do not remove for Validator. 
*/}} {{- if ne $.Compiler "terraformgoogleconversion-codegen" }} delete(obj, "versionRetentionPeriod") delete(obj, "extraStatements") diff --git a/mmv1/templates/terraform/encoders/health_check_type.erb b/mmv1/templates/terraform/encoders/health_check_type.erb index efedcc412be0..a3efcd50d134 100644 --- a/mmv1/templates/terraform/encoders/health_check_type.erb +++ b/mmv1/templates/terraform/encoders/health_check_type.erb @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -%> - if _, ok := d.GetOk("http_health_check"); ok { hc := d.Get("http_health_check").([]interface{})[0] ps := hc.(map[string]interface{})["port_specification"] diff --git a/mmv1/templates/terraform/examples/cloudfunctions2_abiu.tf.erb b/mmv1/templates/terraform/examples/cloudfunctions2_abiu.tf.erb new file mode 100644 index 000000000000..be7a2a08ac52 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudfunctions2_abiu.tf.erb @@ -0,0 +1,72 @@ +locals { + project = "<%= ctx[:test_env_vars]['project'] %>" # Google Cloud Platform Project ID +} + +resource "google_service_account" "account" { + provider = google-beta + account_id = "<%= ctx[:vars]['service_account'] %>" + display_name = "Test Service Account" +} + +resource "google_pubsub_topic" "topic" { + provider = google-beta + name = "<%= ctx[:vars]['topic'] %>" +} + +resource "google_storage_bucket" "bucket" { + provider = google-beta + name = "${local.project}-<%= ctx[:vars]['bucket_name'] %>" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource "google_storage_bucket_object" "object" { + provider = google-beta + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "<%= ctx[:vars]['zip_path'] %>" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + name = "<%= 
ctx[:vars]['function'] %>" + location = "europe-west6" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloPubSub" # Set the entry point + environment_variables = { + BUILD_CONFIG_TEST = "build_test" + } + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + automatic_update_policy {} + } + + service_config { + max_instance_count = 3 + min_instance_count = 1 + available_memory = "4Gi" + timeout_seconds = 60 + max_instance_request_concurrency = 80 + available_cpu = "4" + environment_variables = { + SERVICE_CONFIG_TEST = "config_test" + } + ingress_settings = "ALLOW_INTERNAL_ONLY" + all_traffic_on_latest_revision = true + service_account_email = google_service_account.account.email + } + + event_trigger { + trigger_region = "us-central1" + event_type = "google.cloud.pubsub.topic.v1.messagePublished" + pubsub_topic = google_pubsub_topic.topic.id + retry_policy = "RETRY_POLICY_RETRY" + } +} diff --git a/mmv1/templates/terraform/examples/cloudfunctions2_abiu_on_deploy.tf.erb b/mmv1/templates/terraform/examples/cloudfunctions2_abiu_on_deploy.tf.erb new file mode 100644 index 000000000000..61ca7d460ce7 --- /dev/null +++ b/mmv1/templates/terraform/examples/cloudfunctions2_abiu_on_deploy.tf.erb @@ -0,0 +1,72 @@ +locals { + project = "<%= ctx[:test_env_vars]['project'] %>" # Google Cloud Platform Project ID +} + +resource "google_service_account" "account" { + provider = google-beta + account_id = "<%= ctx[:vars]['service_account'] %>" + display_name = "Test Service Account" +} + +resource "google_pubsub_topic" "topic" { + provider = google-beta + name = "<%= ctx[:vars]['topic'] %>" +} + +resource "google_storage_bucket" "bucket" { + provider = google-beta + name = "${local.project}-<%= ctx[:vars]['bucket_name'] %>" # Every bucket name must be globally unique + location = "US" + uniform_bucket_level_access = true +} + +resource 
"google_storage_bucket_object" "object" { + provider = google-beta + name = "function-source.zip" + bucket = google_storage_bucket.bucket.name + source = "<%= ctx[:vars]['zip_path'] %>" # Add path to the zipped function source code +} + +resource "google_cloudfunctions2_function" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + name = "<%= ctx[:vars]['function'] %>" + location = "europe-west6" + description = "a new function" + + build_config { + runtime = "nodejs16" + entry_point = "helloPubSub" # Set the entry point + environment_variables = { + BUILD_CONFIG_TEST = "build_test" + } + source { + storage_source { + bucket = google_storage_bucket.bucket.name + object = google_storage_bucket_object.object.name + } + } + on_deploy_update_policy {} + } + + service_config { + max_instance_count = 3 + min_instance_count = 1 + available_memory = "4Gi" + timeout_seconds = 60 + max_instance_request_concurrency = 80 + available_cpu = "4" + environment_variables = { + SERVICE_CONFIG_TEST = "config_test" + } + ingress_settings = "ALLOW_INTERNAL_ONLY" + all_traffic_on_latest_revision = true + service_account_email = google_service_account.account.email + } + + event_trigger { + trigger_region = "us-central1" + event_type = "google.cloud.pubsub.topic.v1.messagePublished" + pubsub_topic = google_pubsub_topic.topic.id + retry_policy = "RETRY_POLICY_RETRY" + } +} diff --git a/mmv1/templates/terraform/examples/data_fusion_instance_psc.tf.erb b/mmv1/templates/terraform/examples/data_fusion_instance_psc.tf.erb new file mode 100644 index 000000000000..16e25435dfe3 --- /dev/null +++ b/mmv1/templates/terraform/examples/data_fusion_instance_psc.tf.erb @@ -0,0 +1,39 @@ +resource "google_data_fusion_instance" "<%= ctx[:primary_resource_id] %>" { + name = "<%= ctx[:vars]["instance_name"] %>" + region = "us-central1" + type = "BASIC" + private_instance = true + + network_config { + connection_type = "PRIVATE_SERVICE_CONNECT_INTERFACES" + private_service_connect_config { + 
network_attachment = google_compute_network_attachment.psc.id + unreachable_cidr_block = "192.168.0.0/25" + } + } + + <%= ctx[:vars]['prober_test_run'] %> +} + +resource "google_compute_network" "psc" { + name = "<%= ctx[:vars]["network_name"] %>" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "psc" { + name = "<%= ctx[:vars]["subnet_name"] %>" + region = "us-central1" + + network = google_compute_network.psc.id + ip_cidr_range = "10.0.0.0/16" +} + +resource "google_compute_network_attachment" "psc" { + name = "<%= ctx[:vars]["attachment_name"] %>" + region = "us-central1" + connection_preference = "ACCEPT_AUTOMATIC" + + subnetworks = [ + google_compute_subnetwork.psc.self_link + ] +} diff --git a/mmv1/templates/terraform/examples/go/bigquery_dataset_resource_tags.tf.tmpl b/mmv1/templates/terraform/examples/go/bigquery_dataset_resource_tags.tf.tmpl new file mode 100644 index 000000000000..b7366659c55e --- /dev/null +++ b/mmv1/templates/terraform/examples/go/bigquery_dataset_resource_tags.tf.tmpl @@ -0,0 +1,41 @@ +data "google_project" "project" { + provider = "google-beta" +} + +resource "google_tags_tag_key" "tag_key1" { + provider = "google-beta" + parent = "projects/${data.google_project.project.number}" + short_name = "{{index $.Vars "tag_key1"}}" +} + +resource "google_tags_tag_value" "tag_value1" { + provider = "google-beta" + parent = "tagKeys/${google_tags_tag_key.tag_key1.name}" + short_name = "{{index $.Vars "tag_value1"}}" +} + +resource "google_tags_tag_key" "tag_key2" { + provider = "google-beta" + parent = "projects/${data.google_project.project.number}" + short_name = "{{index $.Vars "tag_key2"}}" +} + +resource "google_tags_tag_value" "tag_value2" { + provider = "google-beta" + parent = "tagKeys/${google_tags_tag_key.tag_key2.name}" + short_name = "{{index $.Vars "tag_value2"}}" +} + +resource "google_bigquery_dataset" "{{$.PrimaryResourceId}}" { + provider = google-beta + + dataset_id = "{{index $.Vars "dataset_id"}}" 
+ friendly_name = "test" + description = "This is a test description" + location = "EU" + + resource_tags = { + "${data.google_project.project.project_id}/${google_tags_tag_key.tag_key1.short_name}" = "${google_tags_tag_value.tag_value1.short_name}" + "${data.google_project.project.project_id}/${google_tags_tag_key.tag_key2.short_name}" = "${google_tags_tag_value.tag_value2.short_name}" + } +} diff --git a/mmv1/templates/terraform/examples/go/network_attachment_basic.tf.tmpl b/mmv1/templates/terraform/examples/go/network_attachment_basic.tf.tmpl index 121cb0fa7483..948b3a02ba43 100644 --- a/mmv1/templates/terraform/examples/go/network_attachment_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/network_attachment_basic.tf.tmpl @@ -1,5 +1,4 @@ resource "google_compute_network_attachment" "default" { - provider = google-beta name = "{{index $.Vars "resource_name"}}" region = "us-central1" description = "basic network attachment description" @@ -19,13 +18,11 @@ resource "google_compute_network_attachment" "default" { } resource "google_compute_network" "default" { - provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "default" { - provider = google-beta name = "{{index $.Vars "subnetwork_name"}}" region = "us-central1" @@ -34,7 +31,6 @@ resource "google_compute_subnetwork" "default" { } resource "google_project" "rejected_producer_project" { - provider = google-beta project_id = "{{index $.Vars "rejected_producer_project_name"}}" name = "{{index $.Vars "rejected_producer_project_name"}}" org_id = "{{index $.TestEnvVars "org_id"}}" @@ -42,7 +38,6 @@ resource "google_project" "rejected_producer_project" { } resource "google_project" "accepted_producer_project" { - provider = google-beta project_id = "{{index $.Vars "accepted_producer_project_name"}}" name = "{{index $.Vars "accepted_producer_project_name"}}" org_id = "{{index $.TestEnvVars "org_id"}}" diff --git 
a/mmv1/templates/terraform/examples/go/network_attachment_instance_usage.tf.tmpl b/mmv1/templates/terraform/examples/go/network_attachment_instance_usage.tf.tmpl index 6a44bee01a67..df90307115da 100644 --- a/mmv1/templates/terraform/examples/go/network_attachment_instance_usage.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/network_attachment_instance_usage.tf.tmpl @@ -1,11 +1,9 @@ resource "google_compute_network" "default" { - provider = google-beta name = "{{index $.Vars "network_name"}}" auto_create_subnetworks = false } resource "google_compute_subnetwork" "default" { - provider = google-beta name = "{{index $.Vars "subnetwork_name"}}" region = "us-central1" @@ -14,7 +12,6 @@ resource "google_compute_subnetwork" "default" { } resource "google_compute_network_attachment" "{{$.PrimaryResourceId}}" { - provider = google-beta name = "{{index $.Vars "resource_name"}}" region = "us-central1" description = "my basic network attachment" @@ -24,7 +21,6 @@ resource "google_compute_network_attachment" "{{$.PrimaryResourceId}}" { } resource "google_compute_instance" "default" { - provider = google-beta name = "{{index $.Vars "instance_name"}}" zone = "us-central1-a" machine_type = "e2-micro" diff --git a/mmv1/templates/terraform/examples/go/tpu_node_basic.tf.tmpl b/mmv1/templates/terraform/examples/go/tpu_node_basic.tf.tmpl index be56977b3a37..9f516fd9ad65 100644 --- a/mmv1/templates/terraform/examples/go/tpu_node_basic.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/tpu_node_basic.tf.tmpl @@ -1,6 +1,6 @@ -{{- /* WARNING: cidr_block must not overlap with other existing TPU blocks +{{/* WARNING: cidr_block must not overlap with other existing TPU blocks Make sure if you change this value that it does not overlap with the - autogenerated examples. */}} + autogenerated examples. 
*/ -}} data "google_tpu_tensorflow_versions" "available" { } diff --git a/mmv1/templates/terraform/examples/go/tpu_node_full.tf.tmpl b/mmv1/templates/terraform/examples/go/tpu_node_full.tf.tmpl index 26517fd56b85..4432f204ae80 100644 --- a/mmv1/templates/terraform/examples/go/tpu_node_full.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/tpu_node_full.tf.tmpl @@ -1,9 +1,9 @@ data "google_tpu_tensorflow_versions" "available" { } -{{- /* WARNING: cidr_block must not overlap with other existing TPU blocks +{{/* WARNING: cidr_block must not overlap with other existing TPU blocks Make sure if you change this value that it does not overlap with the - autogenerated examples. */}} + autogenerated examples. */ -}} resource "google_tpu_node" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "node_name"}}" @@ -15,10 +15,10 @@ resource "google_tpu_node" "{{$.PrimaryResourceId}}" { description = "Terraform Google Provider test TPU" use_service_networking = true -{{- /* We previously used a separate network resource here, but TPUs only allow using 50 +{{/* We previously used a separate network resource here, but TPUs only allow using 50 different network names, ever. This caused our tests to start failing, so just use the default network in order to still demonstrate using as many fields as - possible on the resource. */}} + possible on the resource. 
*/ -}} network = google_service_networking_connection.private_service_connection.network labels = { diff --git a/mmv1/templates/terraform/examples/go/tpu_node_full_test.tf.tmpl b/mmv1/templates/terraform/examples/go/tpu_node_full_test.tf.tmpl index 073a05285273..7f7090d678bd 100644 --- a/mmv1/templates/terraform/examples/go/tpu_node_full_test.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/tpu_node_full_test.tf.tmpl @@ -1,6 +1,6 @@ -{{- /* WARNING: cidr_block must not overlap with other existing TPU blocks +{{/* WARNING: cidr_block must not overlap with other existing TPU blocks Make sure if you change this value that it does not overlap with the - autogenerated examples. */}} + autogenerated examples. */ -}} resource "google_tpu_node" "{{$.PrimaryResourceId}}" { name = "{{index $.Vars "node_name"}}" @@ -8,18 +8,18 @@ resource "google_tpu_node" "{{$.PrimaryResourceId}}" { accelerator_type = "v3-8" -{{- /* We previously used the first available version from the +{{/* We previously used the first available version from the google_tpu_tensorflow_versions data source. However, this started to return a random set of versions which caused our tests to occasionally fail, so we pin - tensorflow_version to a specific version so that our tests pass reliably. */}} + tensorflow_version to a specific version so that our tests pass reliably. */ -}} tensorflow_version = "2.10.0" description = "Terraform Google Provider test TPU" use_service_networking = true -{{- /* We previously used a separate network resource here, but TPUs only allow using 50 +{{/* We previously used a separate network resource here, but TPUs only allow using 50 different network names, ever. This caused our tests to start failing, so just use the default network in order to still demonstrate using as many fields as - possible on the resource. */}} + possible on the resource. 
*/ -}} network = data.google_compute_network.network.id labels = { diff --git a/mmv1/templates/terraform/examples/go/url_map_custom_error_response_policy.tf.tmpl b/mmv1/templates/terraform/examples/go/url_map_custom_error_response_policy.tf.tmpl new file mode 100644 index 000000000000..6b3a72411f47 --- /dev/null +++ b/mmv1/templates/terraform/examples/go/url_map_custom_error_response_policy.tf.tmpl @@ -0,0 +1,86 @@ +resource "google_compute_url_map" "{{$.PrimaryResourceId}}" { + provider = google-beta + name = "{{index $.Vars "url_map_name"}}" + description = "a description" + + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 5xx responses will be catched + path = "/*" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx", "5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/login" + override_response_code = 404 + } + error_response_rule { + match_response_codes = ["503"] # Only a 503 response will be catched on path example + path = "/example" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx"] + path = "/register" + override_response_code = 401 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = google-beta + name = "{{index $.Vars "backend_service_name"}}" + port_name = "http" + protocol = "HTTP" + 
timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "{{index $.Vars "http_health_check_name"}}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_backend_bucket" "error" { + provider = google-beta + name = "{{index $.Vars "error_backend_bucket_name"}}" + bucket_name = google_storage_bucket.error.name + enable_cdn = true +} + +resource "google_storage_bucket" "error" { + provider = google-beta + name = "{{index $.Vars "storage_bucket_name"}}" + location = "US" +} diff --git a/mmv1/templates/terraform/examples/go/vertex_ai_featureonlinestore_with_optimized.tf.tmpl b/mmv1/templates/terraform/examples/go/vertex_ai_featureonlinestore_with_optimized.tf.tmpl new file mode 100644 index 000000000000..acbdcb5b461c --- /dev/null +++ b/mmv1/templates/terraform/examples/go/vertex_ai_featureonlinestore_with_optimized.tf.tmpl @@ -0,0 +1,21 @@ +resource "google_vertex_ai_feature_online_store" "{{$.PrimaryResourceId}}" { + provider = google + name = "{{index $.Vars "name"}}" + labels = { + foo = "bar" + } + region = "us-central1" + optimized {} + dedicated_serving_endpoint { + private_service_connect_config { + enable_private_service_connect = true + project_allowlist = [data.google_project.project.number] + } + } +} + +data "google_project" "project" { + provider = google +} + + diff --git a/mmv1/templates/terraform/examples/go/vpc_access_connector.tf.tmpl b/mmv1/templates/terraform/examples/go/vpc_access_connector.tf.tmpl index dc3e00536625..bd34a18b1ca0 100644 --- a/mmv1/templates/terraform/examples/go/vpc_access_connector.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/vpc_access_connector.tf.tmpl @@ -1,5 +1,5 @@ resource "google_vpc_access_connector" "connector" { name = "{{index $.Vars "name"}}" ip_cidr_range = "10.8.0.0/28" - network = "default" + network = 
"{{index $.Vars "network_name"}}" } diff --git a/mmv1/templates/terraform/examples/go/vpc_access_connector_shared_vpc.tf.tmpl b/mmv1/templates/terraform/examples/go/vpc_access_connector_shared_vpc.tf.tmpl index 142bfca18406..b66e96ba8ab1 100644 --- a/mmv1/templates/terraform/examples/go/vpc_access_connector_shared_vpc.tf.tmpl +++ b/mmv1/templates/terraform/examples/go/vpc_access_connector_shared_vpc.tf.tmpl @@ -10,10 +10,5 @@ resource "google_compute_subnetwork" "custom_test" { name = "{{index $.Vars "name"}}" ip_cidr_range = "10.2.0.0/28" region = "us-central1" - network = google_compute_network.custom_test.id -} - -resource "google_compute_network" "custom_test" { - name = "{{index $.Vars "name"}}" - auto_create_subnetworks = false + network = "{{index $.Vars "network_name"}}" } \ No newline at end of file diff --git a/mmv1/templates/terraform/examples/healthcare_dataset_cmek.tf.erb b/mmv1/templates/terraform/examples/healthcare_dataset_cmek.tf.erb new file mode 100644 index 000000000000..f841ed515a0f --- /dev/null +++ b/mmv1/templates/terraform/examples/healthcare_dataset_cmek.tf.erb @@ -0,0 +1,36 @@ +data "google_project" "project" {} + +resource "google_healthcare_dataset" "default" { + name = "<%= ctx[:vars]['dataset_name'] %>" + location = "us-central1" + time_zone = "UTC" + + encryption_spec { + kms_key_name = google_kms_crypto_key.crypto_key.id + } + + depends_on = [ + google_kms_crypto_key_iam_binding.healthcare_cmek_keyuser + ] +} + +resource "google_kms_crypto_key" "crypto_key" { + name = "<%= ctx[:vars]['key_name'] %>" + key_ring = google_kms_key_ring.key_ring.id + purpose = "ENCRYPT_DECRYPT" +} + +resource "google_kms_key_ring" "key_ring" { + name = "<%= ctx[:vars]['keyring_name'] %>" + location = "us-central1" +} + +resource "google_kms_crypto_key_iam_binding" "healthcare_cmek_keyuser" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + members = [ + 
"serviceAccount:service-${data.google_project.project.number}@gcp-sa-healthcare.iam.gserviceaccount.com", + ] +} + + diff --git a/mmv1/templates/terraform/examples/network_attachment_basic.tf.erb b/mmv1/templates/terraform/examples/network_attachment_basic.tf.erb index a0196c7a4b3f..3fe14b29adfe 100644 --- a/mmv1/templates/terraform/examples/network_attachment_basic.tf.erb +++ b/mmv1/templates/terraform/examples/network_attachment_basic.tf.erb @@ -1,5 +1,4 @@ resource "google_compute_network_attachment" "default" { - provider = google-beta name = "<%= ctx[:vars]['resource_name'] %>" region = "us-central1" description = "basic network attachment description" @@ -19,13 +18,11 @@ resource "google_compute_network_attachment" "default" { } resource "google_compute_network" "default" { - provider = google-beta name = "<%= ctx[:vars]['network_name'] %>" auto_create_subnetworks = false } resource "google_compute_subnetwork" "default" { - provider = google-beta name = "<%= ctx[:vars]['subnetwork_name'] %>" region = "us-central1" @@ -34,7 +31,6 @@ resource "google_compute_subnetwork" "default" { } resource "google_project" "rejected_producer_project" { - provider = google-beta project_id = "<%= ctx[:vars]['rejected_producer_project_name'] %>" name = "<%= ctx[:vars]['rejected_producer_project_name'] %>" org_id = "<%= ctx[:test_env_vars]['org_id'] %>" @@ -42,7 +38,6 @@ resource "google_project" "rejected_producer_project" { } resource "google_project" "accepted_producer_project" { - provider = google-beta project_id = "<%= ctx[:vars]['accepted_producer_project_name'] %>" name = "<%= ctx[:vars]['accepted_producer_project_name'] %>" org_id = "<%= ctx[:test_env_vars]['org_id'] %>" diff --git a/mmv1/templates/terraform/examples/network_attachment_instance_usage.tf.erb b/mmv1/templates/terraform/examples/network_attachment_instance_usage.tf.erb index 6e5963d781c8..b1696a1201fe 100644 --- a/mmv1/templates/terraform/examples/network_attachment_instance_usage.tf.erb +++ 
b/mmv1/templates/terraform/examples/network_attachment_instance_usage.tf.erb @@ -1,11 +1,9 @@ resource "google_compute_network" "default" { - provider = google-beta name = "<%= ctx[:vars]['network_name'] %>" auto_create_subnetworks = false } resource "google_compute_subnetwork" "default" { - provider = google-beta name = "<%= ctx[:vars]['subnetwork_name'] %>" region = "us-central1" @@ -14,7 +12,6 @@ resource "google_compute_subnetwork" "default" { } resource "google_compute_network_attachment" "<%= ctx[:primary_resource_id] %>" { - provider = google-beta name = "<%= ctx[:vars]['resource_name'] %>" region = "us-central1" description = "my basic network attachment" @@ -24,7 +21,6 @@ resource "google_compute_network_attachment" "<%= ctx[:primary_resource_id] %>" } resource "google_compute_instance" "default" { - provider = google-beta name = "<%= ctx[:vars]['instance_name'] %>" zone = "us-central1-a" machine_type = "e2-micro" diff --git a/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.erb b/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.erb new file mode 100644 index 000000000000..32c94d52e68d --- /dev/null +++ b/mmv1/templates/terraform/examples/url_map_custom_error_response_policy.tf.erb @@ -0,0 +1,86 @@ +resource "google_compute_url_map" "<%= ctx[:primary_resource_id] %>" { + provider = google-beta + name = "<%= ctx[:vars]['url_map_name'] %>" + description = "a description" + + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 5xx responses will be catched + path = "/*" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + 
match_response_codes = ["4xx", "5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/login" + override_response_code = 404 + } + error_response_rule { + match_response_codes = ["503"] # Only a 503 response will be catched on path example + path = "/example" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx"] + path = "/register" + override_response_code = 401 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = google-beta + name = "<%= ctx[:vars]['backend_service_name'] %>" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "<%= ctx[:vars]['http_health_check_name'] %>" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_backend_bucket" "error" { + provider = google-beta + name = "<%= ctx[:vars]['error_backend_bucket_name'] %>" + bucket_name = google_storage_bucket.error.name + enable_cdn = true +} + +resource "google_storage_bucket" "error" { + provider = google-beta + name = "<%= ctx[:vars]['storage_bucket_name'] %>" + location = "US" +} diff --git a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_beta_fields_optimized.tf.erb b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_optimized.tf.erb similarity index 89% rename from mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_beta_fields_optimized.tf.erb rename to mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_optimized.tf.erb index 
40be04127914..d3e5e3c85edc 100644 --- a/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_beta_fields_optimized.tf.erb +++ b/mmv1/templates/terraform/examples/vertex_ai_featureonlinestore_with_optimized.tf.erb @@ -1,5 +1,5 @@ resource "google_vertex_ai_feature_online_store" "<%= ctx[:primary_resource_id] %>" { - provider = google-beta + provider = google name = "<%= ctx[:vars]['name'] %>" labels = { foo = "bar" @@ -15,7 +15,7 @@ resource "google_vertex_ai_feature_online_store" "<%= ctx[:primary_resource_id] } data "google_project" "project" { - provider = google-beta + provider = google } diff --git a/mmv1/templates/terraform/post_update/go/workbench_instance.go.tmpl b/mmv1/templates/terraform/post_update/go/workbench_instance.go.tmpl index cd216f018387..604ee537a488 100644 --- a/mmv1/templates/terraform/post_update/go/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/post_update/go/workbench_instance.go.tmpl @@ -1,7 +1,7 @@ state := d.Get("state").(string) desired_state := d.Get("desired_state").(string) -if state != desired_state { +if state != desired_state || stopInstance{ verb := "start" if desired_state == "STOPPED" { verb = "stop" @@ -15,6 +15,13 @@ if state != desired_state { return fmt.Errorf("Error waiting to modify Workbench Instance state: %s", err) } + if verb == "start"{ + if err := waitForWorkbenchInstanceActive(d, config, d.Timeout(schema.TimeoutUpdate) - time.Minute); err != nil { + return fmt.Errorf("Workbench instance %q did not reach ACTIVE state: %q", d.Get("name").(string), err) + } + + } + } else { log.Printf("[DEBUG] Workbench Instance %q has state %q.", name, state) } diff --git a/mmv1/templates/terraform/pre_create/firebasehosting_site.go.erb b/mmv1/templates/terraform/pre_create/firebasehosting_site.go.erb new file mode 100644 index 000000000000..3989189d0049 --- /dev/null +++ b/mmv1/templates/terraform/pre_create/firebasehosting_site.go.erb @@ -0,0 +1,27 @@ + +// Check if the Firebase hostng site already exits. 
Do an update if so. + +getUrl, err := tpgresource.ReplaceVars(d, config, "{{FirebaseHostingBasePath}}projects/{{project}}/sites/{{site_id}}") +if err != nil { + return err +} +_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: getUrl, + UserAgent: userAgent, + Headers: headers, +}) + +if err == nil { + // Hosting site already exists + log.Printf("[DEBUG] Firebase hosting site already exists %s", d.Get("site_id")) + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/sites/{{site_id}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + return resourceFirebaseHostingSiteUpdate(d, meta) +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_create/go/firebasehosting_site.go.tmpl b/mmv1/templates/terraform/pre_create/go/firebasehosting_site.go.tmpl new file mode 100644 index 000000000000..f31de67be01f --- /dev/null +++ b/mmv1/templates/terraform/pre_create/go/firebasehosting_site.go.tmpl @@ -0,0 +1,27 @@ + +// Check if the Firebase hostng site already exits. Do an update if so. 
+ +getUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}FirebaseHostingBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/sites/{{"{{"}}site_id{{"}}"}}") +if err != nil { + return err +} +_, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: getUrl, + UserAgent: userAgent, + Headers: headers, +}) + +if err == nil { + // Hosting site already exists + log.Printf("[DEBUG] Firebase hosting site already exists %s", d.Get("site_id")) + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/sites/{{"{{"}}site_id{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + return resourceFirebaseHostingSiteUpdate(d, meta) +} \ No newline at end of file diff --git a/mmv1/templates/terraform/pre_delete/go/compute_region_network_endpoint.go.tmpl b/mmv1/templates/terraform/pre_delete/go/compute_region_network_endpoint.go.tmpl index 3d0169cd742b..e17a021da2d6 100644 --- a/mmv1/templates/terraform/pre_delete/go/compute_region_network_endpoint.go.tmpl +++ b/mmv1/templates/terraform/pre_delete/go/compute_region_network_endpoint.go.tmpl @@ -27,7 +27,7 @@ if fqdnProp != "" { toDelete["fqdn"] = fqdnProp } -{{- if ne $.TargetVersionName "ga" }} +{{ if ne $.TargetVersionName `ga` -}} // Instance instanceProp, err := expandNestedComputeRegionNetworkEndpointInstance(d.Get("instance"), d, config) if err != nil { diff --git a/mmv1/templates/terraform/pre_update/go/workbench_instance.go.tmpl b/mmv1/templates/terraform/pre_update/go/workbench_instance.go.tmpl index 847a0bcd1311..b1fb82cd28c9 100644 --- a/mmv1/templates/terraform/pre_update/go/workbench_instance.go.tmpl +++ b/mmv1/templates/terraform/pre_update/go/workbench_instance.go.tmpl @@ -1,5 +1,41 @@ +// Build custom mask since the notebooks API does not support gce_setup as a valid mask +stopInstance := false +newUpdateMask := []string{} 
+if d.HasChange("gce_setup.0.machine_type") { + newUpdateMask = append(newUpdateMask, "gce_setup.machine_type") + stopInstance = true +} +if d.HasChange("gce_setup.0.accelerator_configs") { + newUpdateMask = append(newUpdateMask, "gce_setup.accelerator_configs") + stopInstance = true +} +if d.HasChange("gce_setup.0.shielded_instance_config.0.enable_secure_boot") { + newUpdateMask = append(newUpdateMask, "gce_setup.shielded_instance_config.enable_secure_boot") + stopInstance = true +} +if d.HasChange("gce_setup.0.shielded_instance_config.0.enable_vtpm") { + newUpdateMask = append(newUpdateMask, "gce_setup.shielded_instance_config.enable_vtpm") + stopInstance = true +} +if d.HasChange("gce_setup.0.shielded_instance_config.0.enable_integrity_monitoring") { + newUpdateMask = append(newUpdateMask, "gce_setup.shielded_instance_config.enable_integrity_monitoring") + stopInstance = true +} +if d.HasChange("gce_setup.0.metadata") { + newUpdateMask = append(newUpdateMask, "gceSetup.metadata") +} +if d.HasChange("effective_labels") { + newUpdateMask = append(newUpdateMask, "labels") +} +updateMask = newUpdateMask +// Overwrite the previously set mask. 
+url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(newUpdateMask, ",")}) +if err != nil { + return err +} + name := d.Get("name").(string) -if d.HasChange("gce_setup.0.machine_type") || d.HasChange("gce_setup.0.accelerator_configs") || d.HasChange("gce_setup.0.shielded_instance_config"){ +if stopInstance{ state := d.Get("state").(string) if state != "STOPPED" { @@ -20,26 +56,9 @@ if d.HasChange("gce_setup.0.machine_type") || d.HasChange("gce_setup.0.accelerat log.Printf("[DEBUG] Workbench Instance %q need not be stopped for the update.", name) } -// Build custom mask since the notebooks API does not support gce_setup as a valid mask -newUpdateMask := []string{} -if d.HasChange("gce_setup.0.machine_type") { - newUpdateMask = append(newUpdateMask, "gce_setup.machine_type") +if d.HasChange("gce_setup.0.boot_disk.0.disk_size_gb") { + resizeWorkbenchInstanceDisk(config, d, project, userAgent, true) } -if d.HasChange("gce_setup.0.accelerator_configs") { - newUpdateMask = append(newUpdateMask, "gce_setup.accelerator_configs") -} -if d.HasChange("gce_setup.0.shielded_instance_config") { - newUpdateMask = append(newUpdateMask, "gce_setup.shielded_instance_config") -} -if d.HasChange("gce_setup.0.metadata") { - newUpdateMask = append(newUpdateMask, "gceSetup.metadata") -} -if d.HasChange("effective_labels") { - newUpdateMask = append(newUpdateMask, "labels") -} - -// Overwrite the previously set mask. 
-url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": strings.Join(newUpdateMask, ",")}) -if err != nil { - return err +if d.HasChange("gce_setup.0.data_disks.0.disk_size_gb") { + resizeWorkbenchInstanceDisk(config, d, project, userAgent, false) } diff --git a/mmv1/templates/terraform/resource.go.tmpl b/mmv1/templates/terraform/resource.go.tmpl index 97f3863d2962..f1fce957a967 100644 --- a/mmv1/templates/terraform/resource.go.tmpl +++ b/mmv1/templates/terraform/resource.go.tmpl @@ -70,7 +70,7 @@ func Resource{{ $.ResourceName -}}() *schema.Resource { return &schema.Resource{ Create: resource{{ $.ResourceName -}}Create, Read: resource{{ $.ResourceName -}}Read, -{{- if $.Updatable -}} {{/* ##TODO Q2 || $.root_labels? -}} */}} +{{- if or $.Updatable $.RootLabels }} Update: resource{{ $.ResourceName -}}Update, {{- end}} Delete: resource{{ $.ResourceName -}}Delete, @@ -84,7 +84,7 @@ func Resource{{ $.ResourceName -}}() *schema.Resource { Timeouts: &schema.ResourceTimeout { Create: schema.DefaultTimeout({{ $.Timeouts.InsertMinutes -}} * time.Minute), -{{- if $.Updatable -}} {{/* ##TODO Q2 || $.root_labels? 
-}} */}} +{{- if or $.Updatable $.RootLabels }} Update: schema.DefaultTimeout({{ $.Timeouts.UpdateMinutes -}} * time.Minute), {{- end}} Delete: schema.DefaultTimeout({{ $.Timeouts.DeleteMinutes -}} * time.Minute), @@ -137,12 +137,12 @@ func Resource{{ $.ResourceName -}}() *schema.Resource { {{- if $.VirtualFields -}} {{- range $field := $.VirtualFields }} "{{ $field.Name -}}": { - Type: schema.{{ $field.Type -}}, + Type: {{ $field.TFType $field.Type -}}, Optional: true, -{{ if $field.Immutable -}} +{{- if $field.Immutable }} ForceNew: true, {{- end}} -{{ if $field.DefaultValue -}} +{{- if not (eq $field.DefaultValue nil) }} Default: {{ $field.GoLiteral $field.DefaultValue -}}, {{- end}} Description: `{{ replace $field.GetDescription "`" "'" -1 -}}`, @@ -182,7 +182,7 @@ func resource{{ $.ResourceName }}{{ camelize $prop.Name "upper" }}SetStyleDiff(_ {{- end}} func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{}) error { -{{- if and ($.GetAsync.IsA "OpAsync") ($.GetAsync.IncludeProject) ($.GetAsync.Allow "Create") -}} +{{- if and ($.GetAsync) (and ($.GetAsync.IsA "OpAsync") ($.GetAsync.IncludeProject) ($.GetAsync.Allow "Create")) -}} var project string {{- end}} config := meta.(*transport_tpg.Config) @@ -273,8 +273,10 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ billingProject = bp } - {{/*TODO Q2 COMPILE PRECREATE */}} headers := make(http.Header) +{{- if $.CustomCode.PreCreate }} + {{ $.CustomTemplate $.CustomCode.PreCreate false -}} +{{- end}} res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, Method: "{{ upper $.CreateVerb -}}", @@ -298,7 +300,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ return fmt.Errorf("Error creating {{ $.Name -}}: %s", err) } {{/* # Set resource properties from create API response (unless it returns an Operation) */}} -{{if not ($.GetAsync.IsA "OpAsync") }} +{{if and ($.GetAsync) (not 
($.GetAsync.IsA "OpAsync")) }} {{- range $prop := $.GettableProperties }} {{ if and ($.IsInIdentity $prop) $prop.Output }} if err := d.Set("{{ underscore $prop.Name -}}", flatten{{ if $.NestedQuery -}}Nested{{end}}{{ $.ResourceName -}}{{ camelize $prop.Name "upper" -}}(res["{{ $prop.ApiName -}}"], d, config)); err != nil { @@ -315,7 +317,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ } d.SetId(id) -{{if ($.GetAsync.Allow "Create") -}} +{{if and $.GetAsync ($.GetAsync.Allow "Create") -}} {{if ($.GetAsync.IsA "OpAsync") -}} {{if and $.GetAsync.Result.ResourceInsideResponse $.GetIdentity -}} // Use the resource in the operation response to populate @@ -397,7 +399,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{- $.CustomTemplate $.CustomCode.PostCreate false -}} {{- end}} -{{if $.GetAsync.Allow "Create" -}} +{{if and ($.GetAsync) ($.GetAsync.Allow "Create") -}} {{if $.GetAsync.IsA "PollAsync" -}} err = transport_tpg.PollingWaitTime(resource{{ $.ResourceName -}}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncExistence -}}, "Creating {{ $.Name -}}", d.Timeout(schema.TimeoutCreate), {{ $.GetAsync.TargetOccurrences -}}) if err != nil { @@ -420,7 +422,7 @@ func resource{{ $.ResourceName -}}Create(d *schema.ResourceData, meta interface{ {{ end -}} } -{{if ($.GetAsync.IsA "PollAsync")}} +{{if and ($.GetAsync) ($.GetAsync.IsA "PollAsync")}} func resource{{ $.ResourceName -}}PollRead(d *schema.ResourceData, meta interface{}) transport_tpg.PollReadFunc { return func() (map[string]interface{}, error) { {{if $.GetAsync.CustomPollRead -}} @@ -604,7 +606,7 @@ func resource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) {{- if $.VirtualFields -}} // Explicitly set virtual fields to default values if unset {{- range $prop := $.VirtualFields }} -{{ if $prop.DefaultValue -}} +{{ if not (eq $prop.DefaultValue nil) -}} if _, ok := d.GetOkExists("{{ $prop.Name -}}"); !ok { if err := 
d.Set("{{ $prop.Name -}}", {{ $prop.DefaultValue -}}); err != nil { return fmt.Errorf("Error setting {{ $prop.Name -}}: %s", err) @@ -675,7 +677,7 @@ func resource{{ $.ResourceName -}}Read(d *schema.ResourceData, meta interface{}) {{if $.Updatable -}} func resource{{ $.ResourceName -}}Update(d *schema.ResourceData, meta interface{}) error { -{{- if and ($.GetAsync.IsA "OpAsync") ($.GetAsync.IncludeProject) ($.GetAsync.Allow "update") -}} +{{- if and ($.GetAsync) (and ($.GetAsync.IsA "OpAsync") ($.GetAsync.IncludeProject) ($.GetAsync.Allow "update")) -}} var project string {{- end}} config := meta.(*transport_tpg.Config) @@ -802,7 +804,7 @@ if len(updateMask) > 0 { log.Printf("[DEBUG] Finished updating {{ $.Name }} %q: %#v", d.Id(), res) } -{{ if $.GetAsync.Allow "update" -}} +{{ if and ($.GetAsync) ($.GetAsync.Allow "update") -}} {{ if $.GetAsync.IsA "OpAsync" -}} err = {{ $.ClientNamePascal -}}OperationWaitTime( config, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project){{ else }}project{{ end }}, {{ end -}} "Updating {{ $.Name -}}", userAgent, @@ -828,10 +830,11 @@ if len(updateMask) > 0 { {{- end}}{{/*if not immutable*/}} {{ if $.FieldSpecificUpdateMethods }} d.Partial(true) -{{ range $index, $props := $.PropertiesByCustomUpdate }} -if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || d.HasChange(\""}}") { +{{ $CustomUpdateProps := $.PropertiesByCustomUpdate }} +{{ range $group := $.PropertiesByCustomUpdateGroups }} +if d.HasChange("{{ join ($.PropertyNamesToStrings (index $CustomUpdateProps $group)) "\") || d.HasChange(\""}}") { obj := make(map[string]interface{}) -{{ if $index.FingerprintName }} +{{ if $group.FingerprintName }} getUrl, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{$.SelfLinkUri}}") if err != nil { return err @@ -864,10 +867,10 @@ if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || 
d.HasChange(\" return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("{{ $.ResourceName -}} %q", d.Id())) } - obj["{{ $index.FingerprintName }}"] = getRes["{{ $index.FingerprintName }}"] + obj["{{ $group.FingerprintName }}"] = getRes["{{ $group.FingerprintName }}"] {{ end }}{{/*if FingerprintName*/}} -{{ range $propsByKey := $.CustomUpdatePropertiesByKey $index.UpdateUrl $index.UpdateId $index.FingerprintName $index.UpdateVerb }} +{{ range $propsByKey := $.CustomUpdatePropertiesByKey $group.UpdateUrl $group.UpdateId $group.FingerprintName $group.UpdateVerb }} {{ $propsByKey.ApiName -}}Prop, err := expand{{ if $.NestedQuery -}}Nested{{ end }}{{ $.ResourceName -}}{{ camelize $propsByKey.Name "upper" -}}({{ if $propsByKey.FlattenObject }}nil{{else}}d.Get("{{underscore $propsByKey.Name}}"){{ end }}, d, config) if err != nil { return err @@ -890,9 +893,9 @@ if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || d.HasChange(\" -*/}} {{- if $propsByKey.SendEmptyValue -}} } else if v, ok := d.GetOkExists("{{ underscore $propsByKey.Name -}}"); ok || !reflect.DeepEqual(v, {{ $propsByKey.ApiName -}}Prop) { -{{ else if $propsByKey.FlattenObject -}} +{{- else if $propsByKey.FlattenObject -}} } else if !tpgresource.IsEmptyValue(reflect.ValueOf({{ $propsByKey.ApiName -}}Prop)) { -{{ else -}} +{{- else -}} } else if v, ok := d.GetOkExists("{{ underscore $propsByKey.Name -}}"); !tpgresource.IsEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, {{ $propsByKey.ApiName -}}Prop)) { {{- end}} obj["{{ $propsByKey.ApiName -}}"] = {{ $propsByKey.ApiName -}}Prop @@ -914,7 +917,7 @@ if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || d.HasChange(\" transport_tpg.MutexStore.Lock(lockName) defer transport_tpg.MutexStore.Unlock(lockName) {{- end}} - url, err := tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{ $index.UpdateUrl }}") + url, err := 
tpgresource.ReplaceVars{{if $.LegacyLongFormProject -}}ForId{{ end -}}(d, config, "{{"{{"}}{{$.ProductMetadata.Name}}BasePath{{"}}"}}{{ $group.UpdateUrl }}") if err != nil { return err } @@ -937,7 +940,7 @@ if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || d.HasChange(\" res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ Config: config, - Method: "{{ $index.UpdateVerb }}", + Method: "{{ $group.UpdateVerb }}", Project: billingProject, RawURL: url, UserAgent: userAgent, @@ -957,7 +960,7 @@ if d.HasChange("{{ join ($.PropertyNamesToStrings $props) "\") || d.HasChange(\" log.Printf("[DEBUG] Finished updating {{ $.Name }} %q: %#v", d.Id(), res) } -{{ if $.GetAsync.Allow "update" -}} +{{ if and ($.GetAsync) ($.GetAsync.Allow "update") -}} {{ if $.GetAsync.IsA "OpAsync" -}} err = {{ $.ClientNamePascal -}}OperationWaitTime( config, res, {{if or $.HasProject $.GetAsync.IncludeProject -}} {{if $.LegacyLongFormProject -}}tpgresource.GetResourceNameFromSelfLink(project){{ else }}project{{ end }}, {{ end -}} "Updating {{ $.Name -}}", userAgent, @@ -992,9 +995,10 @@ func resource{{ $.ResourceName -}}Update(d *schema.ResourceData, meta interface{ // Only the root field "labels" and "terraform_labels" are mutable return resource{{ $.ResourceName -}}Read(d, meta) } -{{- end}} + +{{ end}} func resource{{ $.ResourceName }}Delete(d *schema.ResourceData, meta interface{}) error { -{{- if and (and ($.GetAsync.IsA "OpAsync") $.GetAsync.IncludeProject) ($.GetAsync.Allow "delete")}} +{{- if and ($.GetAsync) (and (and ($.GetAsync.IsA "OpAsync") $.GetAsync.IncludeProject) ($.GetAsync.Allow "delete")) }} var project string {{- end }} {{- if $.SkipDelete }} @@ -1046,7 +1050,7 @@ func resource{{ $.ResourceName }}Delete(d *schema.ResourceData, meta interface{} {{/*Keep this after mutex - patch request data relies on current resource state*/}} obj, err = resource{{ $.ResourceName }}PatchDeleteEncoder(d, meta, obj) if err != nil { - return 
transport_tpg.HandleNotFoundError(err, d, "{{ $.ResourceName }}") + return transport_tpg.HandleNotFoundError(err, d, "{{ $.Name }}") } {{- if $.UpdateMask }} url, err = transport_tpg.AddQueryParams(url, map[string]string{"updateMask": "{{- join $.NestedQuery.Keys "," -}}"}) @@ -1091,7 +1095,7 @@ func resource{{ $.ResourceName }}Delete(d *schema.ResourceData, meta interface{} if err != nil { return transport_tpg.HandleNotFoundError(err, d, "{{ $.Name }}") } - {{ if $.GetAsync.Allow "Delete" -}} + {{ if and $.GetAsync ($.GetAsync.Allow "Delete") -}} {{ if $.GetAsync.IsA "PollAsync" }} err = transport_tpg.PollingWaitTime(resource{{ $.ResourceName }}PollRead(d, meta), {{ $.GetAsync.CheckResponseFuncAbsence }}, "Deleting {{ $.Name }}", d.Timeout(schema.TimeoutCreate), {{ $.Async.TargetOccurrences }}) if err != nil { @@ -1141,10 +1145,11 @@ func resource{{ $.ResourceName }}Import(d *schema.ResourceData, meta interface{} return nil, fmt.Errorf("Error constructing id: %s", err) } d.SetId(id) - {{- if $.VirtualFields -}} + {{ if $.VirtualFields }} + // Explicitly set virtual fields to default values on import {{- range $vf := $.VirtualFields }} - {{- if $vf.DefaultValue }} + {{- if not (eq $vf.DefaultValue nil) }} if err := d.Set("{{ $vf.Name }}", {{ $vf.DefaultValue }}); err != nil { return nil, fmt.Errorf("Error setting {{ $vf.Name }}: %s", err) } diff --git a/mmv1/templates/terraform/schema_property.go.tmpl b/mmv1/templates/terraform/schema_property.go.tmpl index 17dad86dc26c..44f21449b2c7 100644 --- a/mmv1/templates/terraform/schema_property.go.tmpl +++ b/mmv1/templates/terraform/schema_property.go.tmpl @@ -66,7 +66,7 @@ {{- if .ItemType.DefaultValue -}} Default value: {{ .ItemType.DefaultValue -}} {{- end -}} -Possible values: [{{- .EnumValuesToString "\"" false -}}] +{{- " "}}Possible values: [{{- .ItemType.EnumValuesToString "\"" false -}}] {{- else if and (eq .Type "Enum") (not .Output) -}} {{- if .DefaultValue -}} {{- " "}}Default value: "{{ .DefaultValue -}}" @@ 
-109,14 +109,20 @@ Possible values: [{{- .EnumValuesToString "\"" false -}}] }, {{ else if eq .ItemType.Type "Enum" -}} Elem: &schema.Schema{ - Type: schema.Type{{ .ItemTypeClass -}}, - ValidateFunc: verify.ValidateEnum([]string{ {{- .ItemType.EnumValuesToString "\"" true -}} }), + Type: schema.TypeString, + {{- if not .Output }} + ValidateFunc: verify.ValidateEnum([]string{ {{- .ItemType.EnumValuesToString "\"" false -}} }), + {{- end }} }, {{ else -}} Elem: &schema.Schema{ - Type: schema.Type{{ .ItemTypeClass -}}, {{ if eq .ItemType.Type "ResourceRef" -}} + Type: schema.TypeString, + {{- if not .Output }} DiffSuppressFunc: tpgresource.CompareSelfLinkOrResourceName, + {{- end }} + {{ else -}} + Type: schema.Type{{ .ItemTypeClass -}}, {{ end -}} }, {{ end -}} diff --git a/mmv1/templates/terraform/update_encoder/go/reservation.go.tmpl b/mmv1/templates/terraform/update_encoder/go/reservation.go.tmpl index 1d26fc9900fe..0aa16a8869a7 100644 --- a/mmv1/templates/terraform/update_encoder/go/reservation.go.tmpl +++ b/mmv1/templates/terraform/update_encoder/go/reservation.go.tmpl @@ -37,10 +37,27 @@ // Set project_map. 
projectMap := make(map[string]interface{}) old, new := d.GetChange("share_settings") - oldMap := old.([]interface{})[0].(map[string]interface{})["project_map"] - newMap := new.([]interface{})[0].(map[string]interface{})["project_map"] - before := oldMap.(*schema.Set) - after := newMap.(*schema.Set) + + var before *schema.Set + if oldSlice, ok := old.([]interface{}); ok && len(oldSlice) > 0 { + if oldMap, ok := oldSlice[0].(map[string]interface{})["project_map"]; ok { + before = oldMap.(*schema.Set) + } else { + before = schema.NewSet(schema.HashString, []interface{}{}) + } + } else { + before = schema.NewSet(schema.HashString, []interface{}{}) + } + var after *schema.Set + if newSlice, ok := new.([]interface{}); ok && len(newSlice) > 0 { + if newMap, ok := newSlice[0].(map[string]interface{})["project_map"]; ok { + after = newMap.(*schema.Set) + } else { + after = schema.NewSet(schema.HashString, []interface{}{}) + } + } else { + after = schema.NewSet(schema.HashString, []interface{}{}) + } for _, raw := range after.Difference(before).List() { original := raw.(map[string]interface{}) @@ -56,10 +73,10 @@ } projectMap[transformedId] = singleProject // add added projects to updateMask - if firstProject != true { - maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", original["project_id"]) + if !firstProject { + maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", original["id"]) } else { - maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", original["project_id"]) + maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", original["id"]) firstProject = false } decodedPath, _ := url.QueryUnescape(maskId) @@ -86,7 +103,7 @@ projectNum := project.ProjectNumber projectIdOrNum = fmt.Sprintf("%d", projectNum) } - if firstProject != true { + if !firstProject { maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", projectIdOrNum) } else { maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", projectIdOrNum) 
diff --git a/mmv1/templates/terraform/update_encoder/reservation.go.erb b/mmv1/templates/terraform/update_encoder/reservation.go.erb index 00d6e64107fa..cebe23bc9435 100644 --- a/mmv1/templates/terraform/update_encoder/reservation.go.erb +++ b/mmv1/templates/terraform/update_encoder/reservation.go.erb @@ -39,10 +39,27 @@ // Set project_map. projectMap := make(map[string]interface{}) old, new := d.GetChange("share_settings") - oldMap := old.([]interface{})[0].(map[string]interface{})["project_map"] - newMap := new.([]interface{})[0].(map[string]interface{})["project_map"] - before := oldMap.(*schema.Set) - after := newMap.(*schema.Set) + + var before *schema.Set + if oldSlice, ok := old.([]interface{}); ok && len(oldSlice) > 0 { + if oldMap, ok := oldSlice[0].(map[string]interface{})["project_map"]; ok { + before = oldMap.(*schema.Set) + } else { + before = schema.NewSet(schema.HashString, []interface{}{}) + } + } else { + before = schema.NewSet(schema.HashString, []interface{}{}) + } + var after *schema.Set + if newSlice, ok := new.([]interface{}); ok && len(newSlice) > 0 { + if newMap, ok := newSlice[0].(map[string]interface{})["project_map"]; ok { + after = newMap.(*schema.Set) + } else { + after = schema.NewSet(schema.HashString, []interface{}{}) + } + } else { + after = schema.NewSet(schema.HashString, []interface{}{}) + } for _, raw := range after.Difference(before).List() { original := raw.(map[string]interface{}) @@ -58,10 +75,10 @@ } projectMap[transformedId] = singleProject // add added projects to updateMask - if firstProject != true { - maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", original["project_id"]) + if !firstProject { + maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", original["id"]) } else { - maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", original["project_id"]) + maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", original["id"]) firstProject = false } decodedPath, _ := 
url.QueryUnescape(maskId) @@ -88,7 +105,7 @@ projectNum := project.ProjectNumber projectIdOrNum = fmt.Sprintf("%d", projectNum) } - if firstProject != true { + if !firstProject { maskId = fmt.Sprintf("%s%s", "&paths=shareSettings.projectMap.", projectIdOrNum) } else { maskId = fmt.Sprintf("%s%s", "?paths=shareSettings.projectMap.", projectIdOrNum) diff --git a/mmv1/templates/terraform/yaml_conversion.erb b/mmv1/templates/terraform/yaml_conversion.erb index 6cf23b2f7a84..e2f06c8688ff 100644 --- a/mmv1/templates/terraform/yaml_conversion.erb +++ b/mmv1/templates/terraform/yaml_conversion.erb @@ -60,19 +60,19 @@ references: <% unless object.docs.nil? -%> docs: <% unless object.docs.warning.nil? -%> - warning: '<%= object.docs.warning %>' + warning: <% end -%> <% unless object.docs.note.nil? -%> note: <% end -%> <% unless object.docs.required_properties.nil? -%> - required_properties: '<%= object.docs.required_properties %>' + required_properties: <% end -%> <% unless object.docs.optional_properties.nil? -%> - optional_properties: '<%= object.docs.optional_properties %>' + optional_properties: <% end -%> <% unless object.docs.attributes.nil? -%> - attributes: '<%= object.docs.attributes %>' + attributes: <% end -%> <% end -%> <% @@ -367,6 +367,9 @@ custom_code: <% unless object.custom_code.post_create.nil? -%> post_create: '<%= object.convert_go_file( object.custom_code.post_create )%>' <% end -%> +<% unless object.custom_code.post_delete.nil? -%> + post_delete: '<%= object.convert_go_file( object.custom_code.post_delete )%>' +<% end -%> <% unless object.custom_code.custom_create.nil? -%> custom_create: '<%= object.convert_go_file( object.custom_code.custom_create )%>' <% end -%> @@ -398,9 +401,14 @@ custom_code: test_check_destroy: '<%= object.convert_go_file( object.custom_code.test_check_destroy )%>' <% end -%> <% end -%> -<% unless object.custom_diff.empty? 
|| (object.custom_diff.size == 1 && object.custom_diff.include?("tpgresource.SetLabelsDiff")) -%> +<% +custom_diff = object.custom_diff.reject { + |cdiff| cdiff == "tpgresource.SetLabelsDiff" || cdiff == "tpgresource.SetMetadataLabelsDiff" || cdiff == "tpgresource.SetAnnotationsDiff" || cdiff == "tpgresource.SetMetadataAnnotationsDiff" +} +-%> +<% unless custom_diff.empty? -%> custom_diff: -<% object.custom_diff.each do |cdiff| -%> +<% custom_diff.each do |cdiff| -%> - '<%= cdiff %>' <% end -%> <% end -%> diff --git a/mmv1/templates/terraform/yaml_conversion_field.erb b/mmv1/templates/terraform/yaml_conversion_field.erb index e2b69f12eeb0..3d9b5d39cf1a 100644 --- a/mmv1/templates/terraform/yaml_conversion_field.erb +++ b/mmv1/templates/terraform/yaml_conversion_field.erb @@ -130,7 +130,7 @@ state_func: '<%= property.state_func %>' <% end -%> <% unless property.set_hash_func.nil? -%> - set_hash_func: '<%= property.set_hash_func %>' + set_hash_func: <% end -%> <% unless property.custom_flatten.nil? 
-%> custom_flatten: '<%= object.convert_go_file( property.custom_flatten )%>' diff --git a/mmv1/templates/tgc/resource_converters.go.erb b/mmv1/templates/tgc/resource_converters.go.erb index fd999cc3a5ad..7b27f8edc4ff 100644 --- a/mmv1/templates/tgc/resource_converters.go.erb +++ b/mmv1/templates/tgc/resource_converters.go.erb @@ -116,6 +116,8 @@ func ResourceConverters() map[string][]cai.ResourceConverter { "google_compute_node_group": {compute.ResourceConverterComputeNodeGroup()}, "google_logging_folder_bucket_config": {resourceConverterLogFolderBucket()}, "google_logging_organization_bucket_config": {resourceConverterLogOrganizationBucket()}, + "google_logging_project_bucket_config": {resourceConverterLogProjectBucket()}, + "google_logging_billing_account_bucket_config": {resourceConverterLogBillingAccountBucket()}, "google_cloud_tasks_queue": {cloudtasks.ResourceConverterCloudTasksQueue()}, "google_pubsub_topic": {pubsub.ResourceConverterPubsubTopic()}, "google_kms_crypto_key": {kms.ResourceConverterKMSCryptoKey()}, diff --git a/mmv1/third_party/terraform/acctest/go/framework_test_utils.go b/mmv1/third_party/terraform/acctest/go/framework_test_utils.go new file mode 100644 index 000000000000..54cf5d88b2bf --- /dev/null +++ b/mmv1/third_party/terraform/acctest/go/framework_test_utils.go @@ -0,0 +1,75 @@ +package acctest + +import ( + "context" + "fmt" + "log" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/diag" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func GetFwTestProvider(t *testing.T) *frameworkTestProvider { + configsLock.RLock() + fwProvider, ok := fwProviders[t.Name()] + configsLock.RUnlock() + if ok { + return fwProvider + } + + var diags diag.Diagnostics + p := NewFrameworkTestProvider(t.Name()) + configureApiClient(context.Background(), &p.FrameworkProvider, &diags) + if diags.HasError() { + log.Fatalf("%d errors when configuring test provider 
client: first is %s", diags.ErrorsCount(), diags.Errors()[0].Detail()) + } + + return p +} + +// General test utils + +// TestExtractResourceAttr navigates a test's state to find the specified resource (or data source) attribute and makes the value +// accessible via the attributeValue string pointer. +func TestExtractResourceAttr(resourceName string, attributeName string, attributeValue *string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] // To find a datasource, include `data.` at the start of the resourceName value + + if !ok { + return fmt.Errorf("resource name %s not found in state", resourceName) + } + + attrValue, ok := rs.Primary.Attributes[attributeName] + + if !ok { + return fmt.Errorf("attribute %s not found in resource %s state", attributeName, resourceName) + } + + *attributeValue = attrValue + + return nil + } +} + +// TestCheckAttributeValuesEqual compares two string pointers, which have been used to retrieve attribute values from the test's state. +func TestCheckAttributeValuesEqual(i *string, j *string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if testStringValue(i) != testStringValue(j) { + return fmt.Errorf("attribute values are different, got %s and %s", testStringValue(i), testStringValue(j)) + } + + return nil + } +} + +// testStringValue returns string values from string pointers, handling nil pointers. 
+func testStringValue(sPtr *string) string { + if sPtr == nil { + return "" + } + + return *sPtr +} diff --git a/mmv1/third_party/terraform/acctest/go/test_utils.go.tmpl b/mmv1/third_party/terraform/acctest/go/test_utils.go.tmpl new file mode 100644 index 000000000000..bb43591b891d --- /dev/null +++ b/mmv1/third_party/terraform/acctest/go/test_utils.go.tmpl @@ -0,0 +1,268 @@ + +package acctest + +import ( + "archive/zip" + "bytes" + "context" + "errors" + "fmt" + "io/ioutil" + "math/rand" + "os" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/providerserver" + "github.com/hashicorp/terraform-plugin-go/tfprotov5" + "github.com/hashicorp/terraform-plugin-mux/tf5muxserver" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func CheckDataSourceStateMatchesResourceState(dataSourceName, resourceName string) func(*terraform.State) error { + return CheckDataSourceStateMatchesResourceStateWithIgnores(dataSourceName, resourceName, map[string]struct{}{}) +} + +func CheckDataSourceStateMatchesResourceStateWithIgnores(dataSourceName, resourceName string, ignoreFields map[string]struct{}) func(*terraform.State) error { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[dataSourceName] + if !ok { + return fmt.Errorf("can't find %s in state", dataSourceName) + } + + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("can't find %s in state", resourceName) + } + + dsAttr := ds.Primary.Attributes + rsAttr := rs.Primary.Attributes + + errMsg := "" + // Data sources are often derived from resources, so iterate over the resource fields to + // make sure all fields are accounted for in the data source. + // If a field exists in the data source but not in the resource, its expected value should + // be checked separately. 
+ for k := range rsAttr { + if _, ok := ignoreFields[k]; ok { + continue + } + if _, ok := ignoreFields["labels.%"]; ok && strings.HasPrefix(k, "labels.") { + continue + } + if _, ok := ignoreFields["terraform_labels.%"]; ok && strings.HasPrefix(k, "terraform_labels.") { + continue + } + if k == "%" { + continue + } + if dsAttr[k] != rsAttr[k] { + // ignore data sources where an empty list is being compared against a null list. + if k[len(k)-1:] == "#" && (dsAttr[k] == "" || dsAttr[k] == "0") && (rsAttr[k] == "" || rsAttr[k] == "0") { + continue + } + errMsg += fmt.Sprintf("%s is %s; want %s\n", k, dsAttr[k], rsAttr[k]) + } + } + + if errMsg != "" { + return errors.New(errMsg) + } + + return nil + } +} + +// General test utils + +// MuxedProviders returns the correct test provider (between the sdk version or the framework version) +func MuxedProviders(testName string) (func() tfprotov5.ProviderServer, error) { + ctx := context.Background() + + providers := []func() tfprotov5.ProviderServer{ + providerserver.NewProtocol5(NewFrameworkTestProvider(testName)), // framework provider + GetSDKProvider(testName).GRPCProvider, // sdk provider + } + + muxServer, err := tf5muxserver.NewMuxServer(ctx, providers...) 
+ + if err != nil { + return nil, err + } + + return muxServer.ProviderServer, nil +} + +func RandString(t *testing.T, length int) string { + if !IsVcrEnabled() { + return acctest.RandString(length) + } + envPath := os.Getenv("VCR_PATH") + vcrMode := os.Getenv("VCR_MODE") + s, err := vcrSource(t, envPath, vcrMode) + if err != nil { + // At this point we haven't created any resources, so fail fast + t.Fatal(err) + } + + r := rand.New(s.source) + result := make([]byte, length) + set := "abcdefghijklmnopqrstuvwxyz012346789" + for i := 0; i < length; i++ { + result[i] = set[r.Intn(len(set))] + } + return string(result) +} + +func RandInt(t *testing.T) int { + if !IsVcrEnabled() { + return acctest.RandInt() + } + envPath := os.Getenv("VCR_PATH") + vcrMode := os.Getenv("VCR_MODE") + s, err := vcrSource(t, envPath, vcrMode) + if err != nil { + // At this point we haven't created any resources, so fail fast + t.Fatal(err) + } + + return rand.New(s.source).Int() +} + +// ProtoV5ProviderFactories returns a muxed ProviderServer that uses the provider code from this repo (SDK and plugin-framework). +// Used to set ProtoV5ProviderFactories in a resource.TestStep within an acceptance test. 
+func ProtoV5ProviderFactories(t *testing.T) map[string]func() (tfprotov5.ProviderServer, error) { + return map[string]func() (tfprotov5.ProviderServer, error){ + "google": func() (tfprotov5.ProviderServer, error) { + provider, err := MuxedProviders(t.Name()) + return provider(), err + }, + } +} + +// ProtoV5ProviderBetaFactories returns the same as ProtoV5ProviderFactories only the provider is mapped with +// "google-beta" to ensure that registry examples use `google-beta` if the example is versioned as beta; +// normal beta tests should continue to use ProtoV5ProviderFactories +func ProtoV5ProviderBetaFactories(t *testing.T) map[string]func() (tfprotov5.ProviderServer, error) { + return map[string]func() (tfprotov5.ProviderServer, error){ +{{/* Add a google-#{version} provider for each version that is supported by this version. This allows us to run google-beta tests within a google-alpha provider. */ -}} +{{ range $aliasedVersion := $.SupportedProviderVersions -}} + "google-{{ $aliasedVersion }}": func() (tfprotov5.ProviderServer, error) { + provider, err := MuxedProviders(t.Name()) + return provider(), err + }, +{{- end }} + } +} + +// This is a Printf sibling (Nprintf; Named Printf), which handles strings like +// Nprintf("Hello %{target}!", map[string]interface{}{"target":"world"}) == "Hello world!". +// This is particularly useful for generated tests, where we don't want to use Printf, +// since that would require us to generate a very particular ordering of arguments. 
+func Nprintf(format string, params map[string]interface{}) string { + for key, val := range params { + format = strings.Replace(format, "%{"+key+"}", fmt.Sprintf("%v", val), -1) + } + return format +} + +func TestBucketName(t *testing.T) string { + return fmt.Sprintf("%s-%d", "tf-test-bucket", RandInt(t)) +} + +func CreateZIPArchiveForCloudFunctionSource(t *testing.T, sourcePath string) string { + source, err := ioutil.ReadFile(sourcePath) + if err != nil { + t.Fatal(err.Error()) + } + // Create a buffer to write our archive to. + buf := new(bytes.Buffer) + + // Create a new zip archive. + w := zip.NewWriter(buf) + + f, err := w.Create("index.js") + if err != nil { + t.Fatal(err.Error()) + } + _, err = f.Write(source) + if err != nil { + t.Fatal(err.Error()) + } + + // Make sure to check the error on Close. + err = w.Close() + if err != nil { + t.Fatal(err.Error()) + } + // Create temp file to write zip to + tmpfile, err := ioutil.TempFile("", "sourceArchivePrefix") + if err != nil { + t.Fatal(err.Error()) + } + + if _, err := tmpfile.Write(buf.Bytes()); err != nil { + t.Fatal(err.Error()) + } + if err := tmpfile.Close(); err != nil { + t.Fatal(err.Error()) + } + return tmpfile.Name() +} + +// providerConfigEnvNames returns a list of all the environment variables that could be set by a user to configure the provider +func providerConfigEnvNames() []string { + + envs := []string{} + + // Use existing collections of ENV names + envVarsSets := [][]string{ + envvar.CredsEnvVars, // credentials field + envvar.ProjectEnvVars, // project field + envvar.RegionEnvVars, // region field + envvar.ZoneEnvVars, // zone field + } + for _, set := range envVarsSets { + envs = append(envs, set...) 
+ } + + // Add remaining ENVs + envs = append(envs, "GOOGLE_OAUTH_ACCESS_TOKEN") // access_token field + envs = append(envs, "GOOGLE_BILLING_PROJECT") // billing_project field + envs = append(envs, "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT") // impersonate_service_account field + envs = append(envs, "USER_PROJECT_OVERRIDE") // user_project_override field + envs = append(envs, "CLOUDSDK_CORE_REQUEST_REASON") // request_reason field + + envs = append(envs, "GOOGLE_APPLICATION_CREDENTIALS") // ADC used to configure clients when provider lacks credentials and access_token + + return envs +} + +// UnsetProviderConfigEnvs unsets any ENVs in the test environment that +// configure the provider. +// The testing package will restore the original values after the test +func UnsetTestProviderConfigEnvs(t *testing.T) { + envs := providerConfigEnvNames() + if len(envs) > 0 { + for _, k := range envs { + t.Setenv(k, "") + } + } +} + +func SetupTestEnvs(t *testing.T, envValues map[string]string) { + // Set ENVs + if len(envValues) > 0 { + for k, v := range envValues { + t.Setenv(k, v) + } + } +} + +// Returns a fake credentials JSON string with the client_email set to a test-specific value +func GenerateFakeCredentialsJson(testId string) string { + json := fmt.Sprintf(`{"private_key_id": "foo","private_key": "bar","client_email": "%s@example.com","client_id": "id@foo.com","type": "service_account"}`, testId) + return json +} diff --git a/mmv1/third_party/terraform/functions/location_from_id_test.go b/mmv1/third_party/terraform/functions/location_from_id_test.go index 26b73d2bc26b..209fc794ae9a 100644 --- a/mmv1/third_party/terraform/functions/location_from_id_test.go +++ b/mmv1/third_party/terraform/functions/location_from_id_test.go @@ -11,8 +11,6 @@ import ( func TestAccProviderFunction_location_from_id(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) 
location := "us-central1" locationRegex := regexp.MustCompile(fmt.Sprintf("^%s$", location)) diff --git a/mmv1/third_party/terraform/functions/name_from_id_test.go b/mmv1/third_party/terraform/functions/name_from_id_test.go index 8eaf139918c3..f5084cb1d230 100644 --- a/mmv1/third_party/terraform/functions/name_from_id_test.go +++ b/mmv1/third_party/terraform/functions/name_from_id_test.go @@ -11,8 +11,6 @@ import ( func TestAccProviderFunction_name_from_id(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) context := map[string]interface{}{ "function_name": "name_from_id", diff --git a/mmv1/third_party/terraform/functions/project_from_id_test.go b/mmv1/third_party/terraform/functions/project_from_id_test.go index 34f0f624af59..0677aa783747 100644 --- a/mmv1/third_party/terraform/functions/project_from_id_test.go +++ b/mmv1/third_party/terraform/functions/project_from_id_test.go @@ -12,8 +12,6 @@ import ( func TestAccProviderFunction_project_from_id(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) projectId := envvar.GetTestProjectFromEnv() projectIdRegex := regexp.MustCompile(fmt.Sprintf("^%s$", projectId)) diff --git a/mmv1/third_party/terraform/functions/region_from_id_test.go b/mmv1/third_party/terraform/functions/region_from_id_test.go index 79ef135d9a27..96f6661c38fb 100644 --- a/mmv1/third_party/terraform/functions/region_from_id_test.go +++ b/mmv1/third_party/terraform/functions/region_from_id_test.go @@ -12,8 +12,6 @@ import ( func TestAccProviderFunction_region_from_id(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) region := envvar.GetTestRegionFromEnv() regionRegex := 
regexp.MustCompile(fmt.Sprintf("^%s$", region)) diff --git a/mmv1/third_party/terraform/functions/region_from_zone.go b/mmv1/third_party/terraform/functions/region_from_zone.go index d1ba3104e88c..1a75b751169b 100644 --- a/mmv1/third_party/terraform/functions/region_from_zone.go +++ b/mmv1/third_party/terraform/functions/region_from_zone.go @@ -3,6 +3,7 @@ package functions import ( "context" "fmt" + "strings" "github.com/hashicorp/terraform-plugin-framework/function" ) @@ -35,23 +36,24 @@ func (f RegionFromZoneFunction) Definition(ctx context.Context, req function.Def func (f RegionFromZoneFunction) Run(ctx context.Context, req function.RunRequest, resp *function.RunResponse) { // Load arguments from function call - var arg0 string - resp.Error = function.ConcatFuncErrors(req.Arguments.GetArgument(ctx, 0, &arg0)) + var zone string + resp.Error = function.ConcatFuncErrors(req.Arguments.GetArgument(ctx, 0, &zone)) if resp.Error != nil { return } - if arg0 == "" { + if zone == "" { err := function.NewArgumentFuncError(0, "The input string cannot be empty.") resp.Error = function.ConcatFuncErrors(err) return } - if arg0[len(arg0)-2] != '-' { - err := function.NewArgumentFuncError(0, fmt.Sprintf("The input string \"%s\" is not a valid zone name.", arg0)) + zoneParts := strings.Split(zone, "-") + + if len(zoneParts) < 3 { + err := function.NewArgumentFuncError(0, fmt.Sprintf("The input string \"%s\" is not a valid zone name.", zone)) resp.Error = function.ConcatFuncErrors(err) - return + } else { + resp.Error = function.ConcatFuncErrors(resp.Result.Set(ctx, strings.Join(zoneParts[:len(zoneParts)-1], "-"))) } - - resp.Error = function.ConcatFuncErrors(resp.Result.Set(ctx, arg0[:len(arg0)-2])) } diff --git a/mmv1/third_party/terraform/functions/region_from_zone_test.go b/mmv1/third_party/terraform/functions/region_from_zone_test.go index 68c001ada13a..5ce3679d104f 100644 --- a/mmv1/third_party/terraform/functions/region_from_zone_test.go +++ 
b/mmv1/third_party/terraform/functions/region_from_zone_test.go @@ -11,8 +11,7 @@ import ( func TestAccProviderFunction_region_from_zone(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) + projectZone := "us-central1-a" projectRegion := "us-central1" projectRegionRegex := regexp.MustCompile(fmt.Sprintf("^%s$", projectRegion)) diff --git a/mmv1/third_party/terraform/functions/zone_from_id_test.go b/mmv1/third_party/terraform/functions/zone_from_id_test.go index 7058825a05bc..45247ef1aee0 100644 --- a/mmv1/third_party/terraform/functions/zone_from_id_test.go +++ b/mmv1/third_party/terraform/functions/zone_from_id_test.go @@ -12,8 +12,6 @@ import ( func TestAccProviderFunction_zone_from_id(t *testing.T) { t.Parallel() - // Skipping due to requiring TF 1.8.0 in VCR systems : https://github.com/hashicorp/terraform-provider-google/issues/17451 - acctest.SkipIfVcr(t) zone := envvar.GetTestZoneFromEnv() zoneRegex := regexp.MustCompile(fmt.Sprintf("^%s$", zone)) diff --git a/mmv1/third_party/terraform/fwprovider/go/framework_provider_test.go.tmpl b/mmv1/third_party/terraform/fwprovider/go/framework_provider_test.go.tmpl new file mode 100644 index 000000000000..c5a53d10d2ec --- /dev/null +++ b/mmv1/third_party/terraform/fwprovider/go/framework_provider_test.go.tmpl @@ -0,0 +1,286 @@ +package fwprovider_test + +import ( + "fmt" + "regexp" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func 
TestAccFrameworkProviderMeta_setModuleName(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14158 + acctest.SkipIfVcr(t) + t.Parallel() + + moduleName := "my-module" + managedZoneName := fmt.Sprintf("tf-test-zone-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducerFramework(t), + Steps: []resource.TestStep{ + { + Config: testAccFrameworkProviderMeta_setModuleName(moduleName, managedZoneName, acctest.RandString(t, 10)), + }, + }, + }) +} + +func TestAccFrameworkProviderBasePath_setInvalidBasePath(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + CheckDestroy: testAccCheckComputeAddressDestroyProducer(t), + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.58.0", + Source: "hashicorp/google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}", + }, + }, + Config: testAccProviderBasePath_setBasePath("https://www.example.com/compute/beta/", acctest.RandString(t, 10)), + ExpectError: regexp.MustCompile("got HTTP response code 404 with body"), + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Config: testAccProviderBasePath_setBasePath("https://www.example.com/compute/beta/", acctest.RandString(t, 10)), + ExpectError: regexp.MustCompile("got HTTP response code 404 with body"), + }, + }, + }) +} + +func TestAccFrameworkProviderBasePath_setBasePath(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14158 + acctest.SkipIfVcr(t) + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducerFramework(t), + 
Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.58.0", + Source: "hashicorp/google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}", + }, + }, + Config: testAccFrameworkProviderBasePath_setBasePath("https://www.googleapis.com/dns/v1beta2/", acctest.RandString(t, 10)), + }, + { + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.58.0", + Source: "hashicorp/google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}", + }, + }, + ResourceName: "google_dns_managed_zone.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Config: testAccFrameworkProviderBasePath_setBasePath("https://www.googleapis.com/dns/v1beta2/", acctest.RandString(t, 10)), + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ResourceName: "google_dns_managed_zone.foo", + ImportState: true, + ImportStateVerify: true, + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Config: testAccFrameworkProviderBasePath_setBasePathstep3("https://www.googleapis.com/dns/v1beta2/", acctest.RandString(t, 10)), + }, + }, + }) +} + +func testAccFrameworkProviderMeta_setModuleName(key, managedZoneName, recordSetName string) string { + return fmt.Sprintf(` +terraform { + provider_meta "google" { + module_name = "%s" + } +} + +provider "google" {} + +resource "google_dns_managed_zone" "zone" { + name = "%s-hashicorptest-com" + dns_name = "%s.hashicorptest.com." 
+} + +resource "google_dns_record_set" "rs" { + managed_zone = google_dns_managed_zone.zone.name + name = "%s.${google_dns_managed_zone.zone.dns_name}" + type = "A" + ttl = 300 + rrdatas = [ + "192.168.1.0", + ] +} + +data "google_dns_record_set" "rs" { + managed_zone = google_dns_record_set.rs.managed_zone + name = google_dns_record_set.rs.name + type = google_dns_record_set.rs.type +}`, key, managedZoneName, managedZoneName, recordSetName) +} + +func testAccFrameworkProviderBasePath_setBasePath(endpoint, name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "dns_custom_endpoint" + dns_custom_endpoint = "%s" +} + +resource "google_dns_managed_zone" "foo" { + provider = google.dns_custom_endpoint + name = "tf-test-zone-%s" + dns_name = "tf-test-zone-%s.hashicorptest.com." + description = "QA DNS zone" +} + +data "google_dns_managed_zone" "qa" { + provider = google.dns_custom_endpoint + name = google_dns_managed_zone.foo.name +}`, endpoint, name, name) +} + +func testAccFrameworkProviderBasePath_setBasePathstep3(endpoint, name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "dns_custom_endpoint" + dns_custom_endpoint = "%s" +} + +resource "google_dns_managed_zone" "foo" { + provider = google.dns_custom_endpoint + name = "tf-test-zone-%s" + dns_name = "tf-test-zone-%s.hashicorptest.com." 
+ description = "QA DNS zone" +} +`, endpoint, name, name) +} + + +// Copy the function from the provider_test package to here +// as that function is in the _test.go file and not importable +func testAccProviderBasePath_setBasePath(endpoint, name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "compute_custom_endpoint" + compute_custom_endpoint = "%s" +} + +resource "google_compute_address" "default" { + provider = google.compute_custom_endpoint + name = "tf-test-address-%s" +}`, endpoint, name) +} + +func testAccProviderMeta_setModuleName(key, name string) string { + return fmt.Sprintf(` +terraform { + provider_meta "google" { + module_name = "%s" + } +} + +resource "google_compute_address" "default" { + name = "tf-test-address-%s" +}`, key, name) +} + +// Copy the function testAccCheckComputeAddressDestroyProducer from the dns_test package to here, +// as that function is in the _test.go file and not importable. +// +// testAccCheckDNSManagedZoneDestroyProducerFramework is the framework version of the generated testAccCheckDNSManagedZoneDestroyProducer +// when we automate this, we'll use the automated version and can get rid of this +func testAccCheckDNSManagedZoneDestroyProducerFramework(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_dns_managed_zone" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + p := acctest.GetFwTestProvider(t) + + url, err := fwresource.ReplaceVarsForFrameworkTest(&p.FrameworkProvider.FrameworkProviderConfig, rs, "{{"{{"}}DNSBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/managedZones/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if !p.BillingProject.IsNull() && p.BillingProject.String() != "" { + billingProject = p.BillingProject.String() + } + + _, diags := 
fwtransport.SendFrameworkRequest(&p.FrameworkProvider.FrameworkProviderConfig, "GET", billingProject, url, p.UserAgent, nil) + if !diags.HasError() { + return fmt.Errorf("DNSManagedZone still exists at %s", url) + } + } + + return nil + } +} + +// Copy the Mmv1 generated function testAccCheckComputeAddressDestroyProducer from the compute_test package to here, +// as that function is in the _test.go file and not importable. +func testAccCheckComputeAddressDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_compute_address" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/regions/{{"{{"}}region{{"}}"}}/addresses/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("ComputeAddress still exists at %s", url) + } + } + + return nil + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/fwtransport/go/framework_config_test.go b/mmv1/third_party/terraform/fwtransport/go/framework_config_test.go new file mode 100644 index 000000000000..a5f7a186dd4d --- /dev/null +++ b/mmv1/third_party/terraform/fwtransport/go/framework_config_test.go @@ -0,0 +1,1743 @@ +package fwtransport_test + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + 
"github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestFrameworkProvider_LoadAndValidateFramework_project(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under test experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue // Sometimes the value is mutated, and no longer matches the original value we supply + ExpectedConfigStructValue basetypes.StringValue // Sometimes the value in config struct differs from what is in the data model + ExpectError bool + }{ + "project value set in the provider schema is not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringValue("project-from-config"), + }, + EnvVariables: map[string]string{ + "GOOGLE_PROJECT": "project-from-GOOGLE_PROJECT", + "GOOGLE_CLOUD_PROJECT": "project-from-GOOGLE_CLOUD_PROJECT", + "GCLOUD_PROJECT": "project-from-GCLOUD_PROJECT", + "CLOUDSDK_CORE_PROJECT": "project-from-CLOUDSDK_CORE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-config"), + ExpectedConfigStructValue: types.StringValue("project-from-config"), + }, + "project value can be set by environment variable: GOOGLE_PROJECT is used first": { + 
ConfigValues: fwmodels.ProviderModel{ + Project: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + "GOOGLE_PROJECT": "project-from-GOOGLE_PROJECT", + "GOOGLE_CLOUD_PROJECT": "project-from-GOOGLE_CLOUD_PROJECT", + "GCLOUD_PROJECT": "project-from-GCLOUD_PROJECT", + "CLOUDSDK_CORE_PROJECT": "project-from-CLOUDSDK_CORE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-GOOGLE_PROJECT"), + ExpectedConfigStructValue: types.StringValue("project-from-GOOGLE_PROJECT"), + }, + "project value can be set by environment variable: GOOGLE_CLOUD_PROJECT is used second": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // GOOGLE_PROJECT unset + "GOOGLE_CLOUD_PROJECT": "project-from-GOOGLE_CLOUD_PROJECT", + "GCLOUD_PROJECT": "project-from-GCLOUD_PROJECT", + "CLOUDSDK_CORE_PROJECT": "project-from-CLOUDSDK_CORE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-GOOGLE_CLOUD_PROJECT"), + ExpectedConfigStructValue: types.StringValue("project-from-GOOGLE_CLOUD_PROJECT"), + }, + "project value can be set by environment variable: GCLOUD_PROJECT is used third": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // GOOGLE_PROJECT unset + // GOOGLE_CLOUD_PROJECT unset + "GCLOUD_PROJECT": "project-from-GCLOUD_PROJECT", + "CLOUDSDK_CORE_PROJECT": "project-from-CLOUDSDK_CORE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-GCLOUD_PROJECT"), + ExpectedConfigStructValue: types.StringValue("project-from-GCLOUD_PROJECT"), + }, + "project value can be set by environment variable: CLOUDSDK_CORE_PROJECT is used fourth": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // GOOGLE_PROJECT unset + // GOOGLE_CLOUD_PROJECT unset + // GCLOUD_PROJECT unset + "CLOUDSDK_CORE_PROJECT": 
"project-from-CLOUDSDK_CORE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-CLOUDSDK_CORE_PROJECT"), + ExpectedConfigStructValue: types.StringValue("project-from-CLOUDSDK_CORE_PROJECT"), + }, + "when no project values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringNull(), // unset + }, + ExpectedDataModelValue: types.StringNull(), + ExpectedConfigStructValue: types.StringNull(), + }, + // Handling empty strings in config + "when project is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + "when project is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_PROJECT": "project-from-GOOGLE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + // Handling unknown values + "when project is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + Project: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "GOOGLE_PROJECT": "project-from-GOOGLE_PROJECT", + }, + ExpectedDataModelValue: types.StringValue("project-from-GOOGLE_PROJECT"), + ExpectedConfigStructValue: types.StringValue("project-from-GOOGLE_PROJECT"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := 
tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.Project.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want project in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.Project.String()) + } + // Checking the value passed to the config structs + if !p.Project.Equal(tc.ExpectedConfigStructValue) { + t.Fatalf("want project in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", tc.ExpectedConfigStructValue, p.Project.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_credentials(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under test experiencing errors, and could be addressed in future refactoring. 
+ // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + const pathToMissingFile string = "./this/path/doesnt/exist.json" // Doesn't exist + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + // ExpectedConfigStructValue not used here, as credentials info isn't stored in the config struct + ExpectError bool + }{ + "credentials can be configured as a path to a credentials JSON file": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringValue(transport_tpg.TestFakeCredentialsPath), + }, + ExpectedDataModelValue: types.StringValue(transport_tpg.TestFakeCredentialsPath), + }, + "configuring credentials as a path to a non-existent file results in an error": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringValue(pathToMissingFile), + }, + ExpectError: true, + }, + "credentials set in the config are not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringValue(acctest.GenerateFakeCredentialsJson("test")), + }, + EnvVariables: map[string]string{ + "GOOGLE_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_CREDENTIALS"), + "GOOGLE_CLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GOOGLE_CLOUD_KEYFILE_JSON"), + "GCLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GCLOUD_KEYFILE_JSON"), + "GOOGLE_APPLICATION_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_APPLICATION_CREDENTIALS"), + }, + ExpectedDataModelValue: types.StringValue(acctest.GenerateFakeCredentialsJson("test")), + }, + "when credentials is unset in the config, environment variables are used: GOOGLE_CREDENTIALS used first": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + "GOOGLE_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_CREDENTIALS"), + 
"GOOGLE_CLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GOOGLE_CLOUD_KEYFILE_JSON"), + "GCLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GCLOUD_KEYFILE_JSON"), + "GOOGLE_APPLICATION_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_APPLICATION_CREDENTIALS"), + }, + ExpectedDataModelValue: types.StringValue(acctest.GenerateFakeCredentialsJson("GOOGLE_CREDENTIALS")), + }, + "when credentials is unset in the config, environment variables are used: GOOGLE_CLOUD_KEYFILE_JSON used second": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // GOOGLE_CREDENTIALS not set + "GOOGLE_CLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GOOGLE_CLOUD_KEYFILE_JSON"), + "GCLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GCLOUD_KEYFILE_JSON"), + "GOOGLE_APPLICATION_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_APPLICATION_CREDENTIALS"), + }, + ExpectedDataModelValue: types.StringValue(acctest.GenerateFakeCredentialsJson("GOOGLE_CLOUD_KEYFILE_JSON")), + }, + "when credentials is unset in the config, environment variables are used: GCLOUD_KEYFILE_JSON used third": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // GOOGLE_CREDENTIALS not set + // GOOGLE_CLOUD_KEYFILE_JSON not set + "GCLOUD_KEYFILE_JSON": acctest.GenerateFakeCredentialsJson("GCLOUD_KEYFILE_JSON"), + "GOOGLE_APPLICATION_CREDENTIALS": acctest.GenerateFakeCredentialsJson("GOOGLE_APPLICATION_CREDENTIALS"), + }, + ExpectedDataModelValue: types.StringValue(acctest.GenerateFakeCredentialsJson("GCLOUD_KEYFILE_JSON")), + }, + "when credentials is unset in the config (and access_token unset), GOOGLE_APPLICATION_CREDENTIALS is used for auth but not to set values in the config": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringNull(), // unset + }, + EnvVariables: map[string]string{ + // 
GOOGLE_CREDENTIALS not set + // GOOGLE_CLOUD_KEYFILE_JSON not set + // GCLOUD_KEYFILE_JSON not set + "GOOGLE_APPLICATION_CREDENTIALS": transport_tpg.TestFakeCredentialsPath, // needs to be a path to a file when used by code + }, + ExpectedDataModelValue: types.StringNull(), + }, + // Error states + "when credentials is set to an empty string in the config the value isn't ignored and results in an error": { + ConfigValues: fwmodels.ProviderModel{ + Credentials: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_APPLICATION_CREDENTIALS": transport_tpg.TestFakeCredentialsPath, // needs to be a path to a file when used by code + }, + ExpectError: true, + }, + // NOTE: these tests can't run in Cloud Build due to ADC locating credentials despite `GOOGLE_APPLICATION_CREDENTIALS` being unset + // See https://cloud.google.com/docs/authentication/application-default-credentials#search_order + // Also, when running these tests locally you need to run `gcloud auth application-default revoke` to ensure your machine isn't supplying ADCs + // "error returned if credentials is set as an empty string and GOOGLE_APPLICATION_CREDENTIALS is unset": { + // ConfigValues: fwmodels.ProviderModel{ + // Credentials: types.StringValue(""), + // }, + // EnvVariables: map[string]string{ + // "GOOGLE_APPLICATION_CREDENTIALS": "", + // }, + // ExpectError: true, + // }, + // "error returned if neither credentials nor access_token set in the provider config, and GOOGLE_APPLICATION_CREDENTIALS is unset": { + // EnvVariables: map[string]string{ + // "GOOGLE_APPLICATION_CREDENTIALS": "", + // }, + // ExpectError: true, + // }, + // Handling unknown values - see separate `TestFrameworkProvider_LoadAndValidateFramework_credentials_unknown` test + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + 
providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + if !data.Credentials.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want credentials to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.Credentials.String()) + } + // fwtransport.FrameworkProviderConfig does not store the credentials info, so test does not make assertions on config struct + }) + } +} + +// NOTE: these tests can't run in Cloud Build due to ADC locating credentials despite `GOOGLE_APPLICATION_CREDENTIALS` being unset +// See https://cloud.google.com/docs/authentication/application-default-credentials#search_order +// Also, when running these tests locally you need to run `gcloud auth application-default revoke` to ensure your machine isn't supplying ADCs +// func TestFrameworkProvider_LoadAndValidateFramework_credentials_unknown(t *testing.T) { +// // This test case is kept separate from other credentials tests, as it requires comparing +// // error messages returned by two different error states: +// // - When credentials = Null +// // - When credentials = Unknown + +// t.Run("the same error is returned whether credentials is set as a null or unknown value (and access_token isn't set)", func(t *testing.T) { +// // Arrange +// acctest.UnsetTestProviderConfigEnvs(t) + +// ctx := context.Background() +// 
tfVersion := "foobar" +// providerversion := "999" + +// impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + +// // Null data and error collection +// diagsNull := diag.Diagnostics{} +// dataNull := fwmodels.ProviderModel{ +// Credentials: types.StringNull(), +// } +// dataNull.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + +// // Unknown data and error collection +// diagsUnknown := diag.Diagnostics{} +// dataUnknown := fwmodels.ProviderModel{ +// Credentials: types.StringUnknown(), +// } +// dataUnknown.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + +// pNull := fwtransport.FrameworkProviderConfig{} +// pUnknown := fwtransport.FrameworkProviderConfig{} + +// // Act +// pNull.LoadAndValidateFramework(ctx, &dataNull, tfVersion, &diagsNull, providerversion) +// pUnknown.LoadAndValidateFramework(ctx, &dataUnknown, tfVersion, &diagsUnknown, providerversion) + +// // Assert +// if !diagsNull.HasError() { +// t.Fatalf("expect errors when credentials is null, but [%d] errors occurred", diagsNull.ErrorsCount()) +// } +// if !diagsUnknown.HasError() { +// t.Fatalf("expect errors when credentials is unknown, but [%d] errors occurred", diagsUnknown.ErrorsCount()) +// } + +// errNull := diagsNull.Errors() +// errUnknown := diagsUnknown.Errors() +// for i := 0; i < len(errNull); i++ { +// if errNull[i] != errUnknown[i] { +// t.Fatalf("expect errors to be the same for null and unknown credentials values, instead got \nnull=`%s` \nunknown=%s", errNull[i], errUnknown[i]) +// } +// } +// }) +// } + +func TestFrameworkProvider_LoadAndValidateFramework_billingProject(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under test experiencing errors, and could be addressed in future refactoring. 
+ // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + ExpectedConfigStructValue basetypes.StringValue + ExpectError bool + }{ + "billing_project value set in the provider schema is not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + BillingProject: types.StringValue("billing-project-from-config"), + }, + EnvVariables: map[string]string{ + "GOOGLE_BILLING_PROJECT": "billing-project-from-env", + }, + ExpectedDataModelValue: types.StringValue("billing-project-from-config"), + ExpectedConfigStructValue: types.StringValue("billing-project-from-config"), + }, + "billing_project can be set by environment variable, when no value supplied via the config": { + ConfigValues: fwmodels.ProviderModel{ + BillingProject: types.StringNull(), + }, + EnvVariables: map[string]string{ + "GOOGLE_BILLING_PROJECT": "billing-project-from-env", + }, + ExpectedDataModelValue: types.StringValue("billing-project-from-env"), + ExpectedConfigStructValue: types.StringValue("billing-project-from-env"), + }, + "when no billing_project values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + BillingProject: types.StringNull(), + }, + ExpectedDataModelValue: types.StringNull(), + ExpectedConfigStructValue: types.StringNull(), + }, + // Handling empty strings in config + "when billing_project is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + BillingProject: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: 
types.StringValue(""), + }, + "when billing_project is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + BillingProject: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_BILLING_PROJECT": "billing-project-from-env", + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.BillingProject.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want billing_project in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.BillingProject.String()) + } + // Checking the value passed to the config structs + if !p.BillingProject.Equal(tc.ExpectedConfigStructValue) { + t.Fatalf("want billing_project in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", 
tc.ExpectedConfigStructValue, p.BillingProject.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_region(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under test experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + ExpectedConfigStructValue basetypes.StringValue + ExpectError bool + }{ + "region value set in the provider config is not overridden by ENVs": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringValue("region-from-config"), + }, + EnvVariables: map[string]string{ + "GOOGLE_REGION": "region-from-env", + }, + ExpectedDataModelValue: types.StringValue("region-from-config"), + ExpectedConfigStructValue: types.StringValue("region-from-config"), + }, + "region values can be supplied as a self link": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1"), + }, + ExpectedDataModelValue: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1"), + ExpectedConfigStructValue: types.StringValue("us-central1"), + }, + "region value can be set by environment variable: GOOGLE_REGION is used": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringNull(), + }, + EnvVariables: map[string]string{ + "GOOGLE_REGION": "region-from-env", + }, + ExpectedDataModelValue: types.StringValue("region-from-env"), + ExpectedConfigStructValue: 
types.StringValue("region-from-env"), + }, + "when no region values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringNull(), + }, + ExpectedDataModelValue: types.StringNull(), + ExpectedConfigStructValue: types.StringNull(), + }, + // Handling empty strings in config + "when region is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + "when region is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_REGION": "region-from-env", + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + // Handling unknown values + "when region is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + Region: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "GOOGLE_REGION": "region-from-env", + }, + ExpectedDataModelValue: types.StringValue("region-from-env"), + ExpectedConfigStructValue: types.StringValue("region-from-env"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = 
impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.Region.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want region in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.Region.String()) + } + // Checking the value passed to the config structs + if !p.Region.Equal(tc.ExpectedConfigStructValue) { + t.Fatalf("want region in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", tc.ExpectedConfigStructValue, p.Region.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_zone(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under test experiencing errors, and could be addressed in future refactoring. 
+ // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + ExpectedConfigStructValue basetypes.StringValue + ExpectError bool + }{ + "zone value set in the provider config is not overridden by ENVs": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringValue("zone-from-config"), + }, + EnvVariables: map[string]string{ + "GOOGLE_ZONE": "zone-from-env", + }, + ExpectedDataModelValue: types.StringValue("zone-from-config"), + ExpectedConfigStructValue: types.StringValue("zone-from-config"), + }, + "does not shorten zone values when provided as a self link": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central1"), + }, + ExpectedDataModelValue: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central1"), + ExpectedConfigStructValue: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/zones/us-central1"), // Value is not shortened from URI to name + }, + "when multiple zone environment variables are provided, `GOOGLE_ZONE` is used first": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringNull(), + }, + EnvVariables: map[string]string{ + "GOOGLE_ZONE": "zone-from-GOOGLE_ZONE", + "GCLOUD_ZONE": "zone-from-GCLOUD_ZONE", + "CLOUDSDK_COMPUTE_ZONE": "zone-from-CLOUDSDK_COMPUTE_ZONE", + }, + ExpectedDataModelValue: types.StringValue("zone-from-GOOGLE_ZONE"), + ExpectedConfigStructValue: types.StringValue("zone-from-GOOGLE_ZONE"), + }, + "when multiple zone environment variables are provided, `GCLOUD_ZONE` is used second": { + ConfigValues: 
fwmodels.ProviderModel{ + Zone: types.StringNull(), + }, + EnvVariables: map[string]string{ + // GOOGLE_ZONE unset + "GCLOUD_ZONE": "zone-from-GCLOUD_ZONE", + "CLOUDSDK_COMPUTE_ZONE": "zone-from-CLOUDSDK_COMPUTE_ZONE", + }, + ExpectedDataModelValue: types.StringValue("zone-from-GCLOUD_ZONE"), + ExpectedConfigStructValue: types.StringValue("zone-from-GCLOUD_ZONE"), + }, + "when multiple zone environment variables are provided, `CLOUDSDK_COMPUTE_ZONE` is used third": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringNull(), + }, + EnvVariables: map[string]string{ + // GOOGLE_ZONE unset + // GCLOUD_ZONE unset + "CLOUDSDK_COMPUTE_ZONE": "zone-from-CLOUDSDK_COMPUTE_ZONE", + }, + ExpectedDataModelValue: types.StringValue("zone-from-CLOUDSDK_COMPUTE_ZONE"), + ExpectedConfigStructValue: types.StringValue("zone-from-CLOUDSDK_COMPUTE_ZONE"), + }, + "when no zone values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringNull(), + }, + ExpectedDataModelValue: types.StringNull(), + ExpectedConfigStructValue: types.StringNull(), + }, + // Handling empty strings in config + "when zone is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + "when zone is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + Zone: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_ZONE": "zone-from-env", + }, + ExpectedDataModelValue: types.StringValue(""), + ExpectedConfigStructValue: types.StringValue(""), + }, + // Handling unknown values + "when zone is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: 
fwmodels.ProviderModel{ + Zone: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "GOOGLE_ZONE": "zone-from-env", + }, + ExpectedDataModelValue: types.StringValue("zone-from-env"), + ExpectedConfigStructValue: types.StringValue("zone-from-env"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.Zone.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want zone in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.Zone.String()) + } + // Checking the value passed to the config structs + if !p.Zone.Equal(tc.ExpectedConfigStructValue) { + t.Fatalf("want zone in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", tc.ExpectedConfigStructValue, p.Zone.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_accessToken(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's 
fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue // Sometimes the value is mutated, and no longer matches the original value we supply + // ExpectedConfigStructValue not used here, as credentials info isn't stored in the config struct + ExpectError bool + }{ + "access_token configured in the provider can be invalid without resulting in errors": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringValue("This is not a valid token string"), + }, + ExpectedDataModelValue: types.StringValue("This is not a valid token string"), + }, + "access_token set in the provider config is not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringValue("value-from-config"), + }, + EnvVariables: map[string]string{ + "GOOGLE_OAUTH_ACCESS_TOKEN": "value-from-env", + }, + ExpectedDataModelValue: types.StringValue("value-from-config"), + }, + "when access_token is unset in the config, the GOOGLE_OAUTH_ACCESS_TOKEN environment variable is used": { + EnvVariables: map[string]string{ + "GOOGLE_OAUTH_ACCESS_TOKEN": "value-from-GOOGLE_OAUTH_ACCESS_TOKEN", + }, + ExpectedDataModelValue: types.StringValue("value-from-GOOGLE_OAUTH_ACCESS_TOKEN"), + }, + "when no access_token values are provided via config or environment variables there's no error (as long as credentials supplied in its absence)": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringNull(), + Credentials: types.StringValue(transport_tpg.TestFakeCredentialsPath), + }, + ExpectedDataModelValue: types.StringNull(), + }, + // Handling empty strings in config + "when access_token is set as an empty string the 
empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + }, + "when access_token is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_OAUTH_ACCESS_TOKEN": "value-from-GOOGLE_OAUTH_ACCESS_TOKEN", + }, + ExpectedDataModelValue: types.StringValue(""), + }, + // Handling unknown values + "when access_token is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + AccessToken: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "GOOGLE_OAUTH_ACCESS_TOKEN": "value-from-GOOGLE_OAUTH_ACCESS_TOKEN", + }, + ExpectedDataModelValue: types.StringValue("value-from-GOOGLE_OAUTH_ACCESS_TOKEN"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if 
!data.AccessToken.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want project in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.AccessToken.String()) + } + // fwtransport.FrameworkProviderConfig does not store the credentials info, so test does not make assertions on config struct + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_userProjectOverride(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.BoolValue + ExpectedConfigStructValue basetypes.BoolValue + ExpectError bool + }{ + "user_project_override value set in the provider schema is not overridden by ENVs": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolValue(false), + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "true", + }, + ExpectedDataModelValue: types.BoolValue(false), + ExpectedConfigStructValue: types.BoolValue(false), + }, + "user_project_override can be set by environment variable: value = true": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolNull(), // not set + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "true", + }, + ExpectedDataModelValue: types.BoolValue(true), + ExpectedConfigStructValue: types.BoolValue(true), + }, + "user_project_override can be set by environment variable: value = false": { + ConfigValues: fwmodels.ProviderModel{ + 
UserProjectOverride: types.BoolNull(), // not set + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "false", + }, + ExpectedDataModelValue: types.BoolValue(false), + ExpectedConfigStructValue: types.BoolValue(false), + }, + "user_project_override can be set by environment variable: value = 1": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolNull(), // not set + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "1", + }, + ExpectedDataModelValue: types.BoolValue(true), + ExpectedConfigStructValue: types.BoolValue(true), + }, + "user_project_override can be set by environment variable: value = 0": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolNull(), // not set + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "0", + }, + ExpectedDataModelValue: types.BoolValue(false), + ExpectedConfigStructValue: types.BoolValue(false), + }, + "setting user_project_override using a non-boolean environment variables results in an error": { + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "I'm not a boolean", + }, + ExpectError: true, + }, + "when no user_project_override values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolNull(), // not set + }, + ExpectedDataModelValue: types.BoolNull(), + ExpectedConfigStructValue: types.BoolNull(), + }, + // Handling unknown values + "when user_project_override is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + UserProjectOverride: types.BoolUnknown(), + }, + EnvVariables: map[string]string{ + "USER_PROJECT_OVERRIDE": "true", + }, + ExpectedDataModelValue: types.BoolValue(true), + ExpectedConfigStructValue: types.BoolValue(true), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + 
acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.UserProjectOverride.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want user_project_override in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.UserProjectOverride.String()) + } + // Checking the value passed to the config structs + if !p.UserProjectOverride.Equal(tc.ExpectedConfigStructValue) { + t.Fatalf("want user_project_override in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", tc.ExpectedConfigStructValue, p.UserProjectOverride.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_impersonateServiceAccount(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. 
+ // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + ExpectedConfigStructValue basetypes.StringValue + ExpectError bool + }{ + "impersonate_service_account value set in the provider schema is not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringValue("value-from-config@example.com"), + }, + EnvVariables: map[string]string{ + "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT": "value-from-env@example.com", + }, + ExpectedDataModelValue: types.StringValue("value-from-config@example.com"), + }, + "impersonate_service_account value can be set by environment variable": { + ConfigValues: fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringNull(), // not set + }, + EnvVariables: map[string]string{ + "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT": "value-from-env@example.com", + }, + ExpectedDataModelValue: types.StringValue("value-from-env@example.com"), + }, + "when no values are provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringNull(), // not set + }, + ExpectedDataModelValue: types.StringNull(), + }, + // Handling empty strings in config + "when impersonate_service_account is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + }, + "when impersonate_service_account is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: 
fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT": "value-from-env@example.com", + }, + ExpectedDataModelValue: types.StringValue(""), + }, + // Handling unknown values + "when impersonate_service_account is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + ImpersonateServiceAccount: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "GOOGLE_IMPERSONATE_SERVICE_ACCOUNT": "value-from-env@example.com", + }, + ExpectedDataModelValue: types.StringValue("value-from-env@example.com"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.ImpersonateServiceAccount.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want impersonate_service_account in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", 
tc.ExpectedDataModelValue, data.ImpersonateServiceAccount.String()) + } + // fwtransport.FrameworkProviderConfig does not store impersonate_service_account info, so test does not make assertions on config struct + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_impersonateServiceAccountDelegates(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + + cases := map[string]struct { + // It's not easy to define basetypes.ListValue values directly in test case, so instead + // pass values into test function to control construction of basetypes.ListValue there. + SetAsNull bool + SetAsUnknown bool + ImpersonateServiceAccountDelegatesValue []string + EnvVariables map[string]string + + ExpectedNull bool + ExpectedUnknown bool + ExpectedDataModelValue []string + ExpectError bool + }{ + "impersonate_service_account_delegates value can be set in the provider schema": { + ImpersonateServiceAccountDelegatesValue: []string{ + "projects/-/serviceAccounts/my-service-account-1@example.iam.gserviceaccount.com", + "projects/-/serviceAccounts/my-service-account-2@example.iam.gserviceaccount.com", + }, + ExpectedDataModelValue: []string{ + "projects/-/serviceAccounts/my-service-account-1@example.iam.gserviceaccount.com", + "projects/-/serviceAccounts/my-service-account-2@example.iam.gserviceaccount.com", + }, + }, + // Note: no environment variables can be used for impersonate_service_account_delegates + "when no impersonate_service_account_delegates value is provided via config, the field remains unset without error": { + SetAsNull: true, // not setting impersonate_service_account_delegates + ExpectedNull: true, + }, + // Handling empty 
values in config + "when impersonate_service_account_delegates is set as an empty array, that value isn't ignored": { + ImpersonateServiceAccountDelegatesValue: []string{}, + ExpectedDataModelValue: []string{}, + }, + // Handling unknown values + "when impersonate_service_account_delegates is an unknown value, the provider treats it as if it's unset, without error": { + SetAsUnknown: true, + ExpectedUnknown: true, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := fwmodels.ProviderModel{} + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + // Set ImpersonateServiceAccountDelegates depending on test case + if !tc.SetAsNull && !tc.SetAsUnknown { + isad, _ := types.ListValueFrom(ctx, types.StringType, tc.ImpersonateServiceAccountDelegatesValue) + data.ImpersonateServiceAccountDelegates = isad + } + if tc.SetAsNull { + data.ImpersonateServiceAccountDelegates = types.ListNull(types.StringType) + } + if tc.SetAsUnknown { + data.ImpersonateServiceAccountDelegates = types.ListUnknown(types.StringType) + } + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + var expected attr.Value + if !tc.ExpectedNull && !tc.ExpectedUnknown { + expected, _ = types.ListValueFrom(ctx, types.StringType, tc.ExpectedDataModelValue) + } + if tc.ExpectedNull { + expected = 
types.ListNull(types.StringType) + } + if tc.ExpectedUnknown { + expected = types.ListUnknown(types.StringType) + } + if !data.ImpersonateServiceAccountDelegates.Equal(expected) { + t.Fatalf("want impersonate_service_account in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", expected, data.ImpersonateServiceAccountDelegates.String()) + } + // fwtransport.FrameworkProviderConfig does not store impersonate_service_account info, so test does not make assertions on config struct + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_scopes(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ScopesValue []string + EnvVariables map[string]string + ExpectedDataModelValue []string + ExpectedConfigStructValue []string + SetAsNull bool + SetAsUnknown bool + ExpectError bool + }{ + "scopes are set in the provider config as a list": { + ScopesValue: []string{"fizz", "buzz", "baz"}, + ExpectedDataModelValue: []string{"fizz", "buzz", "baz"}, + ExpectedConfigStructValue: []string{"fizz", "buzz", "baz"}, + }, + "scopes can be left unset in the provider config without any issues, and a default value is used": { + SetAsNull: true, + ExpectedDataModelValue: transport_tpg.DefaultClientScopes, + ExpectedConfigStructValue: transport_tpg.DefaultClientScopes, + }, + // Handling empty values in config + "scopes set as an empty list the field is treated as if it's unset and a default value is used without errors": { + ScopesValue: []string{}, + ExpectedDataModelValue: 
transport_tpg.DefaultClientScopes, + ExpectedConfigStructValue: transport_tpg.DefaultClientScopes, + }, + // Handling unknown values + "when scopes is an unknown value, the provider treats it as if it's unset and a default value is used without errors": { + SetAsUnknown: true, + ExpectedDataModelValue: transport_tpg.DefaultClientScopes, + ExpectedConfigStructValue: transport_tpg.DefaultClientScopes, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := fwmodels.ProviderModel{} + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + // Set ImpersonateServiceAccountDelegates depending on test case + if !tc.SetAsNull && !tc.SetAsUnknown { + s, _ := types.ListValueFrom(ctx, types.StringType, tc.ScopesValue) + data.Scopes = s + } + if tc.SetAsNull { + data.Scopes = types.ListNull(types.StringType) + } + if tc.SetAsUnknown { + data.Scopes = types.ListUnknown(types.StringType) + } + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + expectedDm, _ := types.ListValueFrom(ctx, types.StringType, tc.ExpectedDataModelValue) + if !data.Scopes.Equal(expectedDm) { + t.Fatalf("want 
project in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.Scopes.String()) + } + // Checking the value passed to the config structs + expectedFpc, _ := types.ListValueFrom(ctx, types.StringType, tc.ExpectedConfigStructValue) + if !p.Scopes.Equal(expectedFpc) { + t.Fatalf("want project in the `FrameworkProviderConfig` struct to be `%s`, but got the value `%s`", tc.ExpectedConfigStructValue, p.Scopes.String()) + } + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_requestReason(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. + // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + // ExpectedConfigStructValue not used here, as credentials info isn't stored in the config struct + ExpectError bool + }{ + "when request_reason is unset in the config, environment variable CLOUDSDK_CORE_REQUEST_REASON is used": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringNull(), + }, + EnvVariables: map[string]string{ + "CLOUDSDK_CORE_REQUEST_REASON": "foo", + }, + ExpectedDataModelValue: types.StringValue("foo"), + }, + "request_reason set in the config is not overridden by environment variables": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringValue("value-from-config"), + }, + EnvVariables: map[string]string{ + "CLOUDSDK_CORE_REQUEST_REASON": "value-from-env", + }, + ExpectedDataModelValue: types.StringValue("value-from-config"), + }, 
+ "when no request_reason is provided via config or environment variables, the field remains unset without error": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringNull(), + }, + ExpectedDataModelValue: types.StringNull(), + }, + // Handling empty strings in config + "when request_reason is set as an empty string, the empty string is not ignored in favor of an environment variable": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringValue(""), + }, + EnvVariables: map[string]string{ + "CLOUDSDK_CORE_REQUEST_REASON": "foo", + }, + ExpectedDataModelValue: types.StringValue(""), + }, + "when request_reason is set as an empty string the empty string is used and not ignored": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringValue(""), + }, + ExpectedDataModelValue: types.StringValue(""), + }, + // Handling unknown values + "when request_reason is an unknown value, the provider treats it as if it's unset and uses an environment variable instead": { + ConfigValues: fwmodels.ProviderModel{ + RequestReason: types.StringUnknown(), + }, + EnvVariables: map[string]string{ + "CLOUDSDK_CORE_REQUEST_REASON": "foo", + }, + ExpectedDataModelValue: types.StringValue("foo"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && 
tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.RequestReason.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want request_reason in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.RequestReason.String()) + } + // fwtransport.FrameworkProviderConfig does not store the request reason info, so test does not make assertions on config struct + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_requestTimeout(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. 
+ // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + ConfigValues fwmodels.ProviderModel + EnvVariables map[string]string + ExpectedDataModelValue basetypes.StringValue + // ExpectedConfigStructValue not used here, as credentials info isn't stored in the config struct + ExpectError bool + }{ + "if a valid request_timeout is configured in the provider, no error will occur": { + ConfigValues: fwmodels.ProviderModel{ + RequestTimeout: types.StringValue("10s"), + }, + ExpectedDataModelValue: types.StringValue("10s"), + }, + "if an invalid request_timeout is configured in the provider, an error will occur": { + ConfigValues: fwmodels.ProviderModel{ + RequestTimeout: types.StringValue("timeout"), + }, + ExpectError: true, + }, + "when request_timeout is set as an empty string, the empty string isn't ignored and an error will occur": { + ConfigValues: fwmodels.ProviderModel{ + RequestTimeout: types.StringValue(""), + }, + ExpectError: true, + }, + // In the SDK version of the provider config code, this scenario results in a value of "0s" + // instead of "120s", but the final 'effective' value is also "120s" + // See : https://github.com/hashicorp/terraform-provider-google/blob/09cb850ee64bcd78e4457df70905530c1ed75f19/google/transport/config.go#L1228-L1233 + "when request_timeout is unset in the config, the default value is 120s.": { + ConfigValues: fwmodels.ProviderModel{ + RequestTimeout: types.StringNull(), + }, + ExpectedDataModelValue: types.StringValue("120s"), + }, + // Handling unknown values + "when request_timeout is an unknown value, the provider treats it as if it's unset and uses the default value 120s": { + ConfigValues: fwmodels.ProviderModel{ + RequestTimeout: types.StringUnknown(), + }, + ExpectedDataModelValue: 
types.StringValue("120s"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags := diag.Diagnostics{} + + data := tc.ConfigValues + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s : %s", num, err.Summary(), err.Detail()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.RequestTimeout.Equal(tc.ExpectedDataModelValue) { + t.Fatalf("want request_timeout in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectedDataModelValue, data.RequestTimeout.String()) + } + // fwtransport.FrameworkProviderConfig does not store the request timeout info, so test does not make assertions on config struct + }) + } +} + +func TestFrameworkProvider_LoadAndValidateFramework_batching(t *testing.T) { + + // Note: In the test function we need to set the below fields in test case's fwmodels.ProviderModel value + // this is to stop the code under tests experiencing errors, and could be addressed in future refactoring. 
+ // - Credentials: If we don't set this then the test looks for application default credentials and can fail depending on the machine running the test + // - ImpersonateServiceAccountDelegates: If we don't set this, we get a nil pointer exception ¯\_(ツ)_/¯ + + cases := map[string]struct { + // It's not easy to create the value of Batching in the test case, so these inputs are used in the test function + SetBatchingAsNull bool + SetBatchingAsUnknown bool + EnableBatchingValue basetypes.BoolValue + SendAfterValue basetypes.StringValue + + EnvVariables map[string]string + + ExpectBatchingNull bool + ExpectBatchingUnknown bool + ExpectEnableBatchingValue basetypes.BoolValue + ExpectSendAfterValue basetypes.StringValue + ExpectError bool + }{ + "batching can be configured with values for enable_batching and send_after": { + EnableBatchingValue: types.BoolValue(true), + SendAfterValue: types.StringValue("45s"), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("45s"), + }, + "if batching is an empty block, it will set the default values for enable_batching and send_after": { + // In this test, we try to create a list containing only null values + EnableBatchingValue: types.BoolNull(), + SendAfterValue: types.StringNull(), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("10s"), + }, + "when batching is configured with only enable_batching, send_after will be set to a default value": { + EnableBatchingValue: types.BoolValue(true), + SendAfterValue: types.StringNull(), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("10s"), + }, + "when batching is configured with only send_after, enable_batching will be set to a default value": { + EnableBatchingValue: types.BoolNull(), + SendAfterValue: types.StringValue("45s"), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("45s"), + }, + "when the whole 
batching block is a null value, the provider provides default values for send_after and enable_batching": { + SetBatchingAsNull: true, + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("3s"), + }, + // Handling unknown values + "when batching is an unknown value, the provider treats it as if it's unset (align to SDK behaviour)": { + SetBatchingAsUnknown: true, + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("3s"), + }, + "when batching is configured with send_after as an unknown value, send_after will be set to a default value": { + EnableBatchingValue: types.BoolValue(true), + SendAfterValue: types.StringUnknown(), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("10s"), + }, + "when batching is configured with enable_batching as an unknown value, enable_batching will be set to a default value": { + EnableBatchingValue: types.BoolUnknown(), + SendAfterValue: types.StringValue("45s"), + ExpectEnableBatchingValue: types.BoolValue(true), + ExpectSendAfterValue: types.StringValue("45s"), + }, + // Error states + "when batching is configured with send_after as an empty string, the empty string is not ignored and results in an error": { + EnableBatchingValue: types.BoolValue(true), + SendAfterValue: types.StringValue(""), + ExpectError: true, + }, + "if batching is configured with send_after as an invalid value, there's an error": { + SendAfterValue: types.StringValue("invalid value"), + ExpectError: true, + }, + "if batching is configured with send_after as number value without seconds (s), there's an error": { + SendAfterValue: types.StringValue("123"), + ExpectError: true, + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + // Arrange + acctest.UnsetTestProviderConfigEnvs(t) + acctest.SetupTestEnvs(t, tc.EnvVariables) + + ctx := context.Background() + tfVersion := "foobar" + providerversion := "999" + diags 
:= diag.Diagnostics{} + + data := fwmodels.ProviderModel{} + data.Credentials = types.StringValue(transport_tpg.TestFakeCredentialsPath) + impersonateServiceAccountDelegates, _ := types.ListValue(types.StringType, []attr.Value{}) // empty list + data.ImpersonateServiceAccountDelegates = impersonateServiceAccountDelegates + + // TODO(SarahFrench) - this code will change when batching is reworked + // See https://github.com/GoogleCloudPlatform/magic-modules/pull/7668 + if !tc.SetBatchingAsNull && !tc.SetBatchingAsUnknown { + b, _ := types.ObjectValue( + map[string]attr.Type{ + "enable_batching": types.BoolType, + "send_after": types.StringType, + }, + map[string]attr.Value{ + "enable_batching": tc.EnableBatchingValue, + "send_after": tc.SendAfterValue, + }, + ) + batching, _ := types.ListValue(types.ObjectType{}.WithAttributeTypes(fwmodels.ProviderBatchingAttributes), []attr.Value{b}) + data.Batching = batching + } + if tc.SetBatchingAsNull { + data.Batching = types.ListNull(types.ObjectType{}.WithAttributeTypes(fwmodels.ProviderBatchingAttributes)) + } + if tc.SetBatchingAsUnknown { + data.Batching = types.ListUnknown(types.ObjectType{}.WithAttributeTypes(fwmodels.ProviderBatchingAttributes)) + } + + p := fwtransport.FrameworkProviderConfig{} + + // Act + p.LoadAndValidateFramework(ctx, &data, tfVersion, &diags, providerversion) + + // Assert + if diags.HasError() && tc.ExpectError { + return + } + if diags.HasError() && !tc.ExpectError { + for i, err := range diags.Errors() { + num := i + 1 + t.Logf("unexpected error #%d : %s", num, err.Summary()) + } + t.Fatalf("did not expect error, but [%d] error(s) occurred", diags.ErrorsCount()) + } + // Checking mutation of the data model + if !data.Batching.IsNull() && tc.ExpectBatchingNull { + t.Fatalf("want batching in the `fwmodels.ProviderModel` struct to be null, but got the value `%s`", data.Batching.String()) + } + if !data.Batching.IsUnknown() && tc.ExpectBatchingUnknown { + t.Fatalf("want batching in the 
`fwmodels.ProviderModel` struct to be unknown, but got the value `%s`", data.Batching.String()) + } + + // The code doesn't mutate values in the fwmodels.ProviderModel struct if the whole batching block is null/unknown, + // so run these checks below only if we're not setting the whole batching block is null/unknown + if !tc.SetBatchingAsNull && !tc.SetBatchingAsUnknown { + var pbConfigs []fwmodels.ProviderBatching + _ = data.Batching.ElementsAs(ctx, &pbConfigs, true) + if !pbConfigs[0].EnableBatching.Equal(tc.ExpectEnableBatchingValue) { + t.Fatalf("want batching.enable_batching in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectEnableBatchingValue.String(), pbConfigs[0].EnableBatching.String()) + } + if !pbConfigs[0].SendAfter.Equal(tc.ExpectSendAfterValue) { + t.Fatalf("want batching.send_after in the `fwmodels.ProviderModel` struct to be `%s`, but got the value `%s`", tc.ExpectSendAfterValue.String(), pbConfigs[0].SendAfter.String()) + } + } + + // Check how the batching block's values are used to configure other parts of the `FrameworkProviderConfig` struct + // - RequestBatcherServiceUsage + // - RequestBatcherIam + if p.RequestBatcherServiceUsage.BatchingConfig.EnableBatching != tc.ExpectEnableBatchingValue.ValueBool() { + t.Fatalf("want batching.enable_batching to be `%s`, but got the value `%v`", tc.ExpectEnableBatchingValue.String(), p.RequestBatcherServiceUsage.BatchingConfig.EnableBatching) + } + if !types.StringValue(p.RequestBatcherServiceUsage.BatchingConfig.SendAfter.String()).Equal(tc.ExpectSendAfterValue) { + t.Fatalf("want batching.send_after to be `%s`, but got the value `%s`", tc.ExpectSendAfterValue.String(), p.RequestBatcherServiceUsage.BatchingConfig.SendAfter.String()) + } + if p.RequestBatcherIam.BatchingConfig.EnableBatching != tc.ExpectEnableBatchingValue.ValueBool() { + t.Fatalf("want batching.enable_batching to be `%s`, but got the value `%v`", tc.ExpectEnableBatchingValue.String(), 
p.RequestBatcherIam.BatchingConfig.EnableBatching) + } + if !types.StringValue(p.RequestBatcherIam.BatchingConfig.SendAfter.String()).Equal(tc.ExpectSendAfterValue) { + t.Fatalf("want batching.send_after to be `%s`, but got the value `%s`", tc.ExpectSendAfterValue.String(), p.RequestBatcherIam.BatchingConfig.SendAfter.String()) + } + }) + } +} + +func TestGetRegionFromRegionSelfLink(t *testing.T) { + cases := map[string]struct { + Input basetypes.StringValue + ExpectedOutput basetypes.StringValue + }{ + "A short region name is returned unchanged": { + Input: types.StringValue("us-central1"), + ExpectedOutput: types.StringValue("us-central1"), + }, + "A selflink is shortened to a region name": { + Input: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/regions/us-central1"), + ExpectedOutput: types.StringValue("us-central1"), + }, + "Logic is specific to region selflinks; zone selflinks are not shortened": { + Input: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/zones/asia-east1-a"), + ExpectedOutput: types.StringValue("https://www.googleapis.com/compute/v1/projects/my-project/zones/asia-east1-a"), + }, + } + + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + + region := fwtransport.GetRegionFromRegionSelfLink(tc.Input) + + if region != tc.ExpectedOutput { + t.Fatalf("got %s, want %s", region, tc.ExpectedOutput) + } + }) + } +} diff --git a/mmv1/third_party/terraform/fwtransport/go/framework_provider_clients.go.tmpl b/mmv1/third_party/terraform/fwtransport/go/framework_provider_clients.go.tmpl new file mode 100644 index 000000000000..1001fa557202 --- /dev/null +++ b/mmv1/third_party/terraform/fwtransport/go/framework_provider_clients.go.tmpl @@ -0,0 +1,55 @@ +package fwtransport + +import ( + "fmt" + "strings" + + "google.golang.org/api/dns/v1" +{{- if ne $.TargetVersionName "ga" }} + firebase "google.golang.org/api/firebase/v1beta1" +{{- end }} + "google.golang.org/api/option" + 
"github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-log/tflog" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +// Methods to create new services from config +// Some base paths below need the version and possibly more of the path +// set on them. The client libraries are inconsistent about which values they need; +// while most only want the host URL, some older ones also want the version and some +// of those "projects" as well. You can find out if this is required by looking at +// the basePath value in the client library file. + +func (p *FrameworkProviderConfig) NewDnsClient(userAgent string, diags *diag.Diagnostics) *dns.Service { + dnsClientBasePath := transport_tpg.RemoveBasePathVersion(p.DNSBasePath) + dnsClientBasePath = strings.ReplaceAll(dnsClientBasePath, "/dns/", "") + tflog.Info(p.Context, fmt.Sprintf("Instantiating Google Cloud DNS client for path %s", dnsClientBasePath)) + clientDns, err := dns.NewService(p.Context, option.WithHTTPClient(p.Client)) + if err != nil { + diags.AddWarning("error creating client dns", err.Error()) + return nil + } + clientDns.UserAgent = userAgent + clientDns.BasePath = dnsClientBasePath + + return clientDns +} + +{{ if ne $.TargetVersionName `ga` -}} +func (p *FrameworkProviderConfig) NewFirebaseClient(userAgent string, diags *diag.Diagnostics) *firebase.Service { + firebaseClientBasePath := transport_tpg.RemoveBasePathVersion(p.FirebaseBasePath) + firebaseClientBasePath = strings.ReplaceAll(firebaseClientBasePath, "/firebase/", "") + tflog.Info(p.Context, fmt.Sprintf("Instantiating Google Cloud firebase client for path %s", firebaseClientBasePath)) + clientFirebase, err := firebase.NewService(p.Context, option.WithHTTPClient(p.Client)) + if err != nil { + diags.AddWarning("error creating client firebase", err.Error()) + return nil + } + clientFirebase.UserAgent = userAgent + clientFirebase.BasePath = firebaseClientBasePath + + return 
clientFirebase +} +{{- end }} diff --git a/mmv1/third_party/terraform/go.mod.erb b/mmv1/third_party/terraform/go.mod.erb index 1e6bde117210..eca77fa58285 100644 --- a/mmv1/third_party/terraform/go.mod.erb +++ b/mmv1/third_party/terraform/go.mod.erb @@ -5,7 +5,7 @@ go 1.21 require ( cloud.google.com/go/bigtable v1.24.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/go.sum b/mmv1/third_party/terraform/go.sum index cd2fce52ae52..30534d3e3b1c 100644 --- a/mmv1/third_party/terraform/go.sum +++ b/mmv1/third_party/terraform/go.sum @@ -18,8 +18,8 @@ cloud.google.com/go/longrunning v0.5.7/go.mod h1:8GClkudohy1Fxm3owmBGid8W0pSgodE dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0 h1:FBKsgWIOEdtpx2YuF+aBH33K0Ih25D3xuKyp9peH4jc= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0 h1:LIPIYi4hy7ttUSrziY/TYwMDuEvvV593n80kRmz6nZ4= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= github.com/ProtonMail/go-crypto v1.1.0-alpha.0 h1:nHGfwXmFvJrSR9xu8qL7BkO4DqTHXE9N5vPhgY2I+j0= diff --git a/mmv1/third_party/terraform/.copywrite.hcl.tmpl 
b/mmv1/third_party/terraform/go/.copywrite.hcl.tmpl similarity index 92% rename from mmv1/third_party/terraform/.copywrite.hcl.tmpl rename to mmv1/third_party/terraform/go/.copywrite.hcl.tmpl index 693bde6062ee..601fe64bb238 100644 --- a/mmv1/third_party/terraform/.copywrite.hcl.tmpl +++ b/mmv1/third_party/terraform/go/.copywrite.hcl.tmpl @@ -16,11 +16,11 @@ project { ".changelog/**", "examples/**", "scripts/**", -{{- if or (eq $.TargetVersionName "") (eq $.TargetVersionName "ga")}} +{{- if or (eq $.TargetVersionName "") (eq $.TargetVersionName "ga") }} "google/**/test-fixtures/**", -{{- else}} - "google-{{$.TargetVersionName}}/**/test-fixtures/**", -{{- end}} +{{- else }} + "google-{{ $.TargetVersionName }}/**/test-fixtures/**", +{{- end }} "META.d/*.yml", "META.d/*.yaml", ".golangci.yml", diff --git a/mmv1/third_party/terraform/.goreleaser.yml.tmpl b/mmv1/third_party/terraform/go/.goreleaser.yml.tmpl similarity index 59% rename from mmv1/third_party/terraform/.goreleaser.yml.tmpl rename to mmv1/third_party/terraform/go/.goreleaser.yml.tmpl index 9850819521ea..99665377d314 100644 --- a/mmv1/third_party/terraform/.goreleaser.yml.tmpl +++ b/mmv1/third_party/terraform/go/.goreleaser.yml.tmpl @@ -4,10 +4,10 @@ archives: - src: 'LICENSE' dst: 'LICENSE.txt' format: zip - name_template: '{{"{{ .ProjectName }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"}}' + name_template: '{{"{{"}} .ProjectName {{"}}"}}_{{"{{"}} .Version {{"}}"}}_{{"{{"}} .Os {{"}}"}}_{{"{{"}} .Arch {{"}}"}}' builds: - # Special binary naming is only necessary for Terraform CLI 0.12 - binary: '{{"{{ .ProjectName }}_v{{ .Version }}"}}_x5' + binary: '{{"{{"}} .ProjectName {{"}}"}}_v{{"{{"}} .Version {{"}}"}}_x5' env: - CGO_ENABLED=0 flags: @@ -30,29 +30,29 @@ builds: - goarch: arm64 goos: windows ldflags: - - -s -w -X github.com/hashicorp/terraform-provider-google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}/version.ProviderVersion={{"{{.Version}}"}} - mod_timestamp: '{{"{{ 
.CommitTimestamp }}"}}' + - -s -w -X github.com/hashicorp/terraform-provider-google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}/version.ProviderVersion={{"{{"}}.Version{{"}}"}} + mod_timestamp: '{{"{{"}} .CommitTimestamp {{"}}"}}' checksum: extra_files: - glob: 'terraform-registry-manifest.json' - name_template: '{{"{{ .ProjectName }}_{{ .Version }}"}}_manifest.json' - name_template: '{{"{{ .ProjectName }}_{{ .Version }}"}}_SHA256SUMS' + name_template: '{{"{{"}} .ProjectName {{"}}"}}_{{"{{"}} .Version {{"}}"}}_manifest.json' + name_template: '{{"{{"}} .ProjectName {{"}}"}}_{{"{{"}} .Version {{"}}"}}_SHA256SUMS' algorithm: sha256 publishers: - name: upload checksum: true extra_files: - glob: 'terraform-registry-manifest.json' - name_template: '{{"{{ .ProjectName }}_{{ .Version }}"}}_manifest.json' + name_template: '{{"{{"}} .ProjectName {{"}}"}}_{{"{{"}} .Version {{"}}"}}_manifest.json' signature: true - cmd: hc-releases upload -product {{"{{ .ProjectName }} -version {{ .Version }} -file={{ .ArtifactPath }}={{ .ArtifactName }}"}} -header="x-terraform-protocol-version=5.0" -header="x-terraform-protocol-versions=5.0" + cmd: hc-releases upload -product {{"{{"}} .ProjectName {{"}}"}} -version {{"{{"}} .Version {{"}}"}} -file={{"{{"}} .ArtifactPath {{"}}"}}={{"{{"}} .ArtifactName {{"}}"}} -header="x-terraform-protocol-version=5.0" -header="x-terraform-protocol-versions=5.0" env: - - HC_RELEASES_HOST={{"{{ .Env.HC_RELEASES_HOST }}"}} - - HC_RELEASES_KEY={{"{{ .Env.HC_RELEASES_KEY }}"}} + - HC_RELEASES_HOST={{"{{"}} .Env.HC_RELEASES_HOST {{"}}"}} + - HC_RELEASES_KEY={{"{{"}} .Env.HC_RELEASES_KEY {{"}}"}} release: extra_files: - glob: 'terraform-registry-manifest.json' - name_template: '{{"{{ .ProjectName }}_{{ .Version }}"}}_manifest.json' + name_template: '{{"{{"}} .ProjectName {{"}}"}}_{{"{{"}} .Version {{"}}"}}_manifest.json' ids: - none signs: @@ -81,4 +81,4 @@ signs: --out ${signature} artifacts: checksum snapshot: - name_template: "{{"{{ 
.Tag }}"}}-next" + name_template: "{{"{{"}} .Tag {{"}}"}}-next" diff --git a/mmv1/third_party/terraform/go.mod b/mmv1/third_party/terraform/go/go.mod similarity index 99% rename from mmv1/third_party/terraform/go.mod rename to mmv1/third_party/terraform/go/go.mod index 491855828e38..9aeb1b3967d5 100644 --- a/mmv1/third_party/terraform/go.mod +++ b/mmv1/third_party/terraform/go/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( cloud.google.com/go/bigtable v1.24.0 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0 github.com/apparentlymart/go-cidr v1.1.0 github.com/davecgh/go-spew v1.1.1 github.com/dnaeon/go-vcr v1.0.1 diff --git a/mmv1/third_party/terraform/main.go.tmpl b/mmv1/third_party/terraform/go/main.go.tmpl similarity index 95% rename from mmv1/third_party/terraform/main.go.tmpl rename to mmv1/third_party/terraform/go/main.go.tmpl index 70e1fa734bc3..fed8e948f027 100644 --- a/mmv1/third_party/terraform/main.go.tmpl +++ b/mmv1/third_party/terraform/go/main.go.tmpl @@ -33,7 +33,7 @@ func main() { // concat with sdkv2 provider providers := []func() tfprotov5.ProviderServer{ providerserver.NewProtocol5(fwprovider.New(version)), // framework provider - provider.Provider().GRPCProvider, // sdk provider + provider.Provider().GRPCProvider, // sdk provider } // use the muxer diff --git a/mmv1/third_party/terraform/release-metadata.hcl.tmpl b/mmv1/third_party/terraform/go/release-metadata.hcl.tmpl similarity index 100% rename from mmv1/third_party/terraform/release-metadata.hcl.tmpl rename to mmv1/third_party/terraform/go/release-metadata.hcl.tmpl diff --git a/mmv1/third_party/terraform/terraform-registry-manifest.json b/mmv1/third_party/terraform/go/terraform-registry-manifest.json similarity index 100% rename from mmv1/third_party/terraform/terraform-registry-manifest.json rename to mmv1/third_party/terraform/go/terraform-registry-manifest.json diff --git 
a/mmv1/third_party/terraform/provider/go/provider_test.go.tmpl b/mmv1/third_party/terraform/provider/go/provider_test.go.tmpl new file mode 100644 index 000000000000..13fc3693271d --- /dev/null +++ b/mmv1/third_party/terraform/provider/go/provider_test.go.tmpl @@ -0,0 +1,500 @@ +package provider_test + +import ( + "fmt" + "regexp" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/provider" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestProvider(t *testing.T) { + if err := provider.Provider().InternalValidate(); err != nil { + t.Fatalf("err: %s", err) + } +} + +func TestProvider_impl(t *testing.T) { + var _ *schema.Provider = provider.Provider() +} + +func TestProvider_noDuplicatesInResourceMap(t *testing.T) { + _, err := provider.ResourceMapWithErrors() + if err != nil { + t.Error(err) + } +} + +func TestProvider_noDuplicatesInDatasourceMap(t *testing.T) { + _, err := provider.DatasourceMapWithErrors() + if err != nil { + t.Error(err) + } +} + +func TestAccProviderBasePath_setBasePath(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeAddressDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccProviderBasePath_setBasePath("https://www.googleapis.com/compute/beta/", acctest.RandString(t, 10)), + }, + { + ResourceName: "google_compute_address.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) 
+} + +func TestAccProviderBasePath_setInvalidBasePath(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeAddressDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccProviderBasePath_setBasePath("https://www.example.com/compute/beta/", acctest.RandString(t, 10)), + ExpectError: regexp.MustCompile("got HTTP response code 404 with body"), + }, + }, + }) +} + +func TestAccProviderMeta_setModuleName(t *testing.T) { + t.Parallel() + + moduleName := "my-module" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeAddressDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccProviderMeta_setModuleName(moduleName, acctest.RandString(t, 10)), + }, + { + ResourceName: "google_compute_address.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccProviderUserProjectOverride(t *testing.T) { + // Parallel fine-grained resource creation + acctest.SkipIfVcr(t) + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + billing := envvar.GetTestBillingAccountFromEnv(t) + pid := "tf-test-" + acctest.RandString(t, 10) + topicName := "tf-test-topic-" + acctest.RandString(t, 10) + + config := acctest.BootstrapConfig(t) + accessToken, err := acctest.SetupProjectsAndGetAccessToken(org, billing, pid, "pubsub", config) + if err != nil { + t.Error(err) + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + // No TestDestroy since that's not really the point of this test + Steps: []resource.TestStep{ + { + Config: testAccProviderUserProjectOverride_step2(accessToken, pid, false, topicName), + ExpectError: 
regexp.MustCompile("Cloud Pub/Sub API has not been used"), + }, + { + Config: testAccProviderUserProjectOverride_step2(accessToken, pid, true, topicName), + }, + { + ResourceName: "google_pubsub_topic.project-2-topic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccProviderUserProjectOverride_step3(accessToken, true), + }, + }, + }) +} + +// Do the same thing as TestAccProviderUserProjectOverride, but using a resource that gets its project via +// a reference to a different resource instead of a project field. +func TestAccProviderIndirectUserProjectOverride(t *testing.T) { + // Parallel fine-grained resource creation + acctest.SkipIfVcr(t) + t.Parallel() + + org := envvar.GetTestOrgFromEnv(t) + billing := envvar.GetTestBillingAccountFromEnv(t) + pid := "tf-test-" + acctest.RandString(t, 10) + + config := acctest.BootstrapConfig(t) + accessToken, err := acctest.SetupProjectsAndGetAccessToken(org, billing, pid, "cloudkms", config) + if err != nil { + t.Error(err) + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + // No TestDestroy since that's not really the point of this test + Steps: []resource.TestStep{ + { + Config: testAccProviderIndirectUserProjectOverride_step2(pid, accessToken, false), + ExpectError: regexp.MustCompile(`Cloud Key Management Service \(KMS\) API has not been used`), + }, + { + Config: testAccProviderIndirectUserProjectOverride_step2(pid, accessToken, true), + }, + { + ResourceName: "google_kms_crypto_key.project-2-key", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccProviderIndirectUserProjectOverride_step3(accessToken, true), + }, + }, + }) +} + +func TestAccProviderCredentialsEmptyString(t *testing.T) { + // Test is not parallel because ENVs are set. 
+ // Need to skip VCR as this test downloads providers from the Terraform Registry + acctest.SkipIfVcr(t) + + creds := envvar.GetTestCredsFromEnv() + project := envvar.GetTestProjectFromEnv() + t.Setenv("GOOGLE_CREDENTIALS", creds) + t.Setenv("GOOGLE_PROJECT", project) + + pid := "tf-test-" + acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + // No TestDestroy since that's not really the point of this test + Steps: []resource.TestStep{ + { + // This is a control for the other test steps; the provider block doesn't contain `credentials = ""` + Config: testAccProviderCredentials_actWithCredsFromEnv(pid), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + PlanOnly: true, + ExpectNonEmptyPlan: true, + }, + { + // Assert that errors are expected with credentials when + // - GOOGLE_CREDENTIALS is set + // - provider block has credentials = "" + // - TPG v4.60.2 is used + // Context: this was an accidental breaking change introduced with muxing + Config: testAccProviderCredentials_actWithCredsFromEnv_emptyString(pid), + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.60.2", + Source: "hashicorp/google", + }, + }, + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`unexpected end of JSON input`), + }, + { + // Assert that errors are NOT expected with credentials when + // - GOOGLE_CREDENTIALS is set + // - provider block has credentials = "" + // - TPG v4.84.0 is used + // Context: this was the fix for the unintended breaking change in 4.60.2 + Config: testAccProviderCredentials_actWithCredsFromEnv_emptyString(pid), + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.84.0", + Source: "hashicorp/google", + }, + }, + PlanOnly: true, + ExpectNonEmptyPlan: true, + }, + { + // Validation errors are expected in 5.0.0+ + // Context: we intentionally introduced the breaking
change again in 5.0.0+ + Config: testAccProviderCredentials_actWithCredsFromEnv_emptyString(pid), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + }, + }) +} + +func TestAccProviderEmptyStrings(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + // No TestDestroy since that's not really the point of this test + Steps: []resource.TestStep{ + // When no values are set in the provider block there are no errors + // This test case is a control to show validation doesn't accidentally flag unset fields + // The "" argument is a lack of key = value being passed into the provider block + { + Config: testAccProvider_checkPlanTimeErrors("", acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + }, + // credentials as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`credentials = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // access_token as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`access_token = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // impersonate_service_account as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`impersonate_service_account = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // project as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`project = ""`, 
acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // billing_project as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`billing_project = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // region as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`region = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + // zone as an empty string causes a validation error + { + Config: testAccProvider_checkPlanTimeErrors(`zone = ""`, acctest.RandString(t, 10)), + PlanOnly: true, + ExpectNonEmptyPlan: true, + ExpectError: regexp.MustCompile(`expected a non-empty string`), + }, + }, + }) +} + +func testAccProviderBasePath_setBasePath(endpoint, name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "compute_custom_endpoint" + compute_custom_endpoint = "%s" +} + +resource "google_compute_address" "default" { + provider = google.compute_custom_endpoint + name = "tf-test-address-%s" +}`, endpoint, name) +} + +func testAccProviderMeta_setModuleName(key, name string) string { + return fmt.Sprintf(` +terraform { + provider_meta "google" { + module_name = "%s" + } +} + +resource "google_compute_address" "default" { + name = "tf-test-address-%s" +}`, key, name) +} + +// Set up two projects. Project 1 has a service account that is used to create a +// pubsub topic in project 2. The pubsub API is only enabled in project 2, +// which causes the create to fail unless user_project_override is set to true. 
+ +func testAccProviderUserProjectOverride_step2(accessToken, pid string, override bool, topicName string) string { + return fmt.Sprintf(` +// See step 3 below, which is really step 2 minus the pubsub topic. +// Step 3 exists because provider configurations can't be removed while objects +// created by that provider still exist in state. Step 3 will remove the +// pubsub topic so the whole config can be deleted. +%s + +resource "google_pubsub_topic" "project-2-topic" { + provider = google.project-1-token + project = "%s-2" + + name = "%s" + labels = { + foo = "bar" + } +} +`, testAccProviderUserProjectOverride_step3(accessToken, override), pid, topicName) +} + +func testAccProviderUserProjectOverride_step3(accessToken string, override bool) string { + return fmt.Sprintf(` +provider "google" { + alias = "project-1-token" + access_token = "%s" + user_project_override = %v +} +`, accessToken, override) +} + +func testAccProviderIndirectUserProjectOverride_step2(pid, accessToken string, override bool) string { + return fmt.Sprintf(` +// See step 3 below, which is really step 2 minus the kms resources. +// Step 3 exists because provider configurations can't be removed while objects +// created by that provider still exist in state. Step 3 will remove the +// kms resources so the whole config can be deleted. 
+%s + +resource "google_kms_key_ring" "project-2-keyring" { + provider = google.project-1-token + project = "%s-2" + + name = "%s" + location = "us-central1" +} + +resource "google_kms_crypto_key" "project-2-key" { + provider = google.project-1-token + name = "%s" + key_ring = google_kms_key_ring.project-2-keyring.id +} + +data "google_kms_secret_ciphertext" "project-2-ciphertext" { + provider = google.project-1-token + crypto_key = google_kms_crypto_key.project-2-key.id + plaintext = "my-secret" +} +`, testAccProviderIndirectUserProjectOverride_step3(accessToken, override), pid, pid, pid) +} + +func testAccProviderIndirectUserProjectOverride_step3(accessToken string, override bool) string { + return fmt.Sprintf(` +provider "google" { + alias = "project-1-token" + + access_token = "%s" + user_project_override = %v +} +`, accessToken, override) +} + +// Copy the Mmv1 generated function testAccCheckComputeAddressDestroyProducer from the compute_test package to here, +// as that function is in the _test.go file and not importable. 
+func testAccCheckComputeAddressDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_compute_address" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}ComputeBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/regions/{{"{{"}}region{{"}}"}}/addresses/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("ComputeAddress still exists at %s", url) + } + } + + return nil + } +} + +func testAccProviderCredentials_actWithCredsFromEnv(name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "testing_credentials" + +} + +resource "google_compute_address" "default" { + provider = google.testing_credentials + name = "%s" +}`, name) +} + +func testAccProviderCredentials_actWithCredsFromEnv_emptyString(name string) string { + return fmt.Sprintf(` +provider "google" { + alias = "testing_credentials" + credentials = "" +} + +resource "google_compute_address" "default" { + provider = google.testing_credentials + name = "%s" +}`, name) +} + +func testAccProvider_checkPlanTimeErrors(providerArgument, randString string) string { + return fmt.Sprintf(` +provider "google" { + %s +} + +# A random resource so that the test can generate a plan (can't check validation errors when plan is empty) +resource "google_pubsub_topic" "example" { + name = "tf-test-planned-resource-%s" +} +`, providerArgument, randString) +} diff --git 
a/mmv1/third_party/terraform/provider/go/provider_validators.go b/mmv1/third_party/terraform/provider/go/provider_validators.go new file mode 100644 index 000000000000..5c30801f9467 --- /dev/null +++ b/mmv1/third_party/terraform/provider/go/provider_validators.go @@ -0,0 +1,47 @@ +package provider + +import ( + "context" + "fmt" + "os" + + googleoauth "golang.org/x/oauth2/google" +) + +func ValidateCredentials(v interface{}, k string) (warnings []string, errors []error) { + if v == nil { + return + } + creds := v.(string) + + // reject empty strings + if v.(string) == "" { + errors = append(errors, + fmt.Errorf("expected a non-empty string")) + return + } + + // if this is a path and we can stat it, assume it's ok + if _, err := os.Stat(creds); err == nil { + return + } + if _, err := googleoauth.CredentialsFromJSON(context.Background(), []byte(creds)); err != nil { + errors = append(errors, + fmt.Errorf("JSON credentials are not valid: %s", err)) + } + + return +} + +func ValidateEmptyStrings(v interface{}, k string) (warnings []string, errors []error) { + if v == nil { + return + } + + if v.(string) == "" { + errors = append(errors, + fmt.Errorf("expected a non-empty string")) + } + + return +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb index b8231d948fd0..9e26302c84c0 100644 --- a/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb +++ b/mmv1/third_party/terraform/provider/provider_mmv1_resources.go.erb @@ -227,15 +227,11 @@ var handwrittenDatasources = map[string]*schema.Resource{ var generatedIAMDatasources = map[string]*schema.Resource{ // ####### START generated IAM datasources ########### - <% - resources_for_version.each do |object| - unless object[:iam_class_name].nil? - -%> + <% resources_for_version.each do |object| -%> + <% unless object[:iam_class_name].nil? 
-%> "<%= object[:terraform_name] -%>_iam_policy": tpgiamresource.DataSourceIamPolicy(<%= object[:iam_class_name] -%>IamSchema, <%= object[:iam_class_name] -%>IamUpdaterProducer), - <% - end - end - -%> + <% end -%> + <% end -%> // ####### END generated IAM datasources ########### } @@ -272,18 +268,12 @@ var generatedResources = map[string]*schema.Resource{ <% unless object[:resource_name].nil? -%> "<%= object[:terraform_name] -%>": <%= object[:resource_name] -%>(), <% end -%> - <% - unless object[:iam_class_name].nil? - -%> + <% unless object[:iam_class_name].nil? -%> "<%= object[:terraform_name] -%>_iam_binding": tpgiamresource.ResourceIamBinding(<%= object[:iam_class_name] -%>IamSchema, <%= object[:iam_class_name] -%>IamUpdaterProducer, <%= object[:iam_class_name] -%>IdParseFunc), "<%= object[:terraform_name] -%>_iam_member": tpgiamresource.ResourceIamMember(<%= object[:iam_class_name] -%>IamSchema, <%= object[:iam_class_name] -%>IamUpdaterProducer, <%= object[:iam_class_name] -%>IdParseFunc), "<%= object[:terraform_name] -%>_iam_policy": tpgiamresource.ResourceIamPolicy(<%= object[:iam_class_name] -%>IamSchema, <%= object[:iam_class_name] -%>IamUpdaterProducer, <%= object[:iam_class_name] -%>IdParseFunc), - <% - end # unless object[:iam_class_name].nil? 
- -%> - <% - end # resources_for_version.each do - -%> + <% end -%> + <% end -%> } var handwrittenResources = map[string]*schema.Resource{ diff --git a/mmv1/third_party/terraform/release-metadata.hcl.erb b/mmv1/third_party/terraform/release-metadata.hcl.erb index ceaa477e2afa..49613b8da720 100644 --- a/mmv1/third_party/terraform/release-metadata.hcl.erb +++ b/mmv1/third_party/terraform/release-metadata.hcl.erb @@ -1,3 +1,3 @@ <% autogen_exception -%> url_source_repository = "https://github.com/hashicorp/terraform-provider-google<%= "-" + version unless version == 'ga' -%>" -url_license = "https://github.com/hashicorp/terraform-provider-google<%= "-" + version unless version == 'ga' -%>/blob/main/LICENSE" \ No newline at end of file +url_license = "https://github.com/hashicorp/terraform-provider-google<%= "-" + version unless version == 'ga' -%>/blob/main/LICENSE" diff --git a/mmv1/third_party/terraform/scripts/go/diff.go.tmpl b/mmv1/third_party/terraform/scripts/go/diff.go.tmpl new file mode 100644 index 000000000000..2eb0c4fe077f --- /dev/null +++ b/mmv1/third_party/terraform/scripts/go/diff.go.tmpl @@ -0,0 +1,173 @@ +package main + + +import ( + "flag" + "fmt" + "reflect" + "runtime" + "sort" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + googleOld "github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }}/{{ $.ProviderFromVersion }}/provider" + // "github.com/hashicorp/terraform-provider-google/google/provider" will be replaced with corresponding package based on the version when generating the provider package + google "github.com/hashicorp/terraform-provider-google/google/provider" +) + +var verbose bool +var vFlag = flag.Bool("verbose", false, "set to true to produce more verbose diffs") +var resourceFlag = flag.String("resource", "", "the name of the terraform resource to diff") + +func main() { + flag.Parse() + if resourceFlag == nil || *resourceFlag == "" { + fmt.Print("resource flag not specified\n") + 
panic("the resource to diff must be specified") + } + resourceName := *resourceFlag + verbose = *vFlag + m := google.ResourceMap() + res, ok := m[resourceName] + if !ok { + panic(fmt.Sprintf("Unable to find resource in TPGB: %s", resourceName)) + } + m2 := googleOld.ResourceMap() + res2, ok := m2[resourceName] + if !ok { + panic(fmt.Sprintf("Unable to find resource in clean TPGB: %s", resourceName)) + } + fmt.Printf("------------Diffing resource %s------------\n", resourceName) + diffSchema(res2.Schema, res.Schema, []string{}) + fmt.Print("------------Done------------\n") +} + +// Diffs a Terraform resource schema. Calls itself recursively as some fields +// are implemented using schema.Resource as their element type +func diffSchema(old, new map[string]*schema.Schema, path []string) { + var sharedKeys []string + var addedKeys []string + for k := range new { + if _, ok := old[k]; ok { + sharedKeys = append(sharedKeys, k) + } else { + // Key not found in old schema + addedKeys = append(addedKeys, k) + } + } + var missingKeys []string + for k := range old { + if _, ok := new[k]; !ok { + missingKeys = append(missingKeys, k) + } + } + sort.Strings(sharedKeys) + sort.Strings(addedKeys) + sort.Strings(missingKeys) + if len(addedKeys) != 0 { + var qualifiedKeys []string + for _, k := range addedKeys { + qualifiedKeys = append(qualifiedKeys, strings.Join(append(path, k), ".")) + } + fmt.Printf("Fields added in tpgtools: %v\n", qualifiedKeys) + } + if len(missingKeys) != 0 { + var qualifiedKeys []string + for _, k := range missingKeys { + qualifiedKeys = append(qualifiedKeys, strings.Join(append(path, k), ".")) + } + fmt.Printf("Fields missing in tpgtools: %v\n", qualifiedKeys) + } + for _, k := range sharedKeys { + diffSchemaObject(old[k], new[k], append(path, k)) + } +} + +// Diffs a schema.Schema object. Calls itself and diffSchema recursively as +// needed on nested fields. 
+func diffSchemaObject(old, new *schema.Schema, path []string) { + if old.Required != new.Required { + fmt.Printf("Required status different for path %s, was: %t is now %t\n", strings.Join(path, "."), old.Required, new.Required) + } + if old.Computed != new.Computed { + fmt.Printf("Computed status different for path %s, was: %t is now %t\n", strings.Join(path, "."), old.Computed, new.Computed) + } + if old.Optional != new.Optional { + fmt.Printf("Optional status different for path %s, was: %t is now %t\n", strings.Join(path, "."), old.Optional, new.Optional) + } + if old.ForceNew != new.ForceNew { + fmt.Printf("ForceNew status different for path %s, was: %t is now %t\n", strings.Join(path, "."), old.ForceNew, new.ForceNew) + } + if old.Type != new.Type { + fmt.Printf("Type different for path %s, was: %s is now %s\n", strings.Join(path, "."), old.Type, new.Type) + // Types are different, other diffs won't make sense + return + } + if old.Sensitive != new.Sensitive { + fmt.Printf("Sensitive status different for path %s, was: %t is now %t\n", strings.Join(path, "."), old.Sensitive, new.Sensitive) + } + if old.Deprecated != new.Deprecated { + fmt.Printf("Deprecated status different for path %s, was: %s is now %s\n", strings.Join(path, "."), old.Deprecated, new.Deprecated) + } + if old.MaxItems != new.MaxItems { + fmt.Printf("MaxItems different for path %s, was: %d is now %d\n", strings.Join(path, "."), old.MaxItems, new.MaxItems) + } + if old.MinItems != new.MinItems { + fmt.Printf("MinItems different for path %s, was: %d is now %d\n", strings.Join(path, "."), old.MinItems, new.MinItems) + } + if old.Default != new.Default { + fmt.Printf("Default value different for path %s, was: %v is now %v\n", strings.Join(path, "."), old.Default, new.Default) + } + if old.ConfigMode != new.ConfigMode { + // This is only set on very few complicated resources (instance, container cluster) + fmt.Printf("ConfigMode different for path %s, was: %v is now %v\n", strings.Join(path, "."), 
old.ConfigMode, new.ConfigMode) + } + // Verbose diffs. Enabled using --verbose flag + if verbose && !reflect.DeepEqual(old.ConflictsWith, new.ConflictsWith) { + fmt.Printf("ConflictsWith different for path %s, was: %v is now %v\n", strings.Join(path, "."), old.ConflictsWith, new.ConflictsWith) + } + oldDiffSuppressFunc := findFunctionName(old.DiffSuppressFunc) + newDiffSuppressFunc := findFunctionName(new.DiffSuppressFunc) + if verbose && oldDiffSuppressFunc != newDiffSuppressFunc { + fmt.Printf("DiffSuppressFunc for path %s, was: %s is now %s\n", strings.Join(path, "."), oldDiffSuppressFunc, newDiffSuppressFunc) + } + oldStateFunc := findFunctionName(old.StateFunc) + newStateFunc := findFunctionName(new.StateFunc) + if verbose && oldStateFunc != newStateFunc { + fmt.Printf("StateFunc for path %s, was: %s is now %s\n", strings.Join(path, "."), oldStateFunc, newStateFunc) + } + oldValidateFunc := findFunctionName(old.ValidateFunc) + newValidateFunc := findFunctionName(new.ValidateFunc) + if verbose && oldValidateFunc != newValidateFunc { + fmt.Printf("ValidateFunc for path %s, was: %s is now %s\n", strings.Join(path, "."), oldValidateFunc, newValidateFunc) + } + oldSet := findFunctionName(old.Set) + newSet := findFunctionName(new.Set) + if verbose && oldSet != newSet { + fmt.Printf("Set function for path %s, was: %s is now %s\n", strings.Join(path, "."), oldSet, newSet) + } + // Recursive calls for nested objects + if old.Type == schema.TypeList || old.Type == schema.TypeMap || old.Type == schema.TypeSet { + oldElem := old.Elem + newElem := new.Elem + if reflect.TypeOf(oldElem) != reflect.TypeOf(newElem) { + fmt.Printf("Elem type different for path %s, was: %T is now %T\n", strings.Join(path, "."), oldElem, newElem) + } + switch v := oldElem.(type) { + case *schema.Resource: + diffSchema(v.Schema, newElem.(*schema.Resource).Schema, path) + case *schema.Schema: + // Primitive unnamed field as only element + diffSchemaObject(v, newElem.(*schema.Schema), append(path, 
"elem")) + } + } +} +func findFunctionName(f interface{}) string { + ptr := reflect.ValueOf(f).Pointer() + fun := runtime.FuncForPC(ptr) + if fun == nil { + return "" + } + split := strings.Split(fun.Name(), ".") + return split[len(split)-1] +} diff --git a/mmv1/third_party/terraform/scripts/go/run_diff.sh.tmpl b/mmv1/third_party/terraform/scripts/go/run_diff.sh.tmpl new file mode 100644 index 000000000000..79b3987c81b1 --- /dev/null +++ b/mmv1/third_party/terraform/scripts/go/run_diff.sh.tmpl @@ -0,0 +1,30 @@ +#!/bin/bash +set -e +set -x +if [ -z "$1" ]; then + echo "Must provide 1 argument - name of resource to diff, e.g. 'google_compute_forwarding_rule'" + exit 1 +fi + +function cleanup() { + go mod edit -dropreplace=github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }} + go mod edit -droprequire=github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }} +} + +trap cleanup EXIT +if [[ -d ~/go/src/github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }} ]]; then + pushd ~/go/src/github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }} + git clean -fdx + git reset --hard + git checkout main + git pull + popd +else + mkdir -p ~/go/src/github.com/hashicorp + git clone https://github.com/hashicorp/terraform-provider-{{ $.ProviderFromVersion }} ~/go/src/github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }} +fi + + +go mod edit -require=github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }}@v0.0.0 +go mod edit -replace github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }}=$(realpath ~/go/src/github.com/hashicorp/terraform-provider-clean-{{ $.ProviderFromVersion }}) +go run scripts/diff.go --resource $1 --verbose diff --git a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go 
b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go index 01ef093c0d13..2b451a7b3a0e 100644 --- a/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go +++ b/mmv1/third_party/terraform/services/accesscontextmanager/resource_access_context_manager_services_perimeters_test.go @@ -26,25 +26,28 @@ func testAccAccessContextManagerServicePerimeters_basicTest(t *testing.T) { Config: testAccAccessContextManagerServicePerimeters_basic(org, "my policy", "level", "storage_perimeter", "bigtable_perimeter", "bigquery_omni_perimeter"), }, { - ResourceName: "google_access_context_manager_service_perimeters.test-access", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_access_context_manager_service_perimeters.test-access", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_perimeters"}, }, { Config: testAccAccessContextManagerServicePerimeters_update(org, "my policy", "level", "storage_perimeter", "bigquery_perimeter", "bigtable_perimeter", "bigquery_omni_perimeter"), }, { - ResourceName: "google_access_context_manager_service_perimeters.test-access", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_access_context_manager_service_perimeters.test-access", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_perimeters"}, }, { Config: testAccAccessContextManagerServicePerimeters_empty(org, "my policy", "level"), }, { - ResourceName: "google_access_context_manager_service_perimeters.test-access", - ImportState: true, - ImportStateVerify: true, + ResourceName: "google_access_context_manager_service_perimeters.test-access", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_perimeters"}, }, }, }) diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy.go 
b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy.go index 28a54ca82abf..c9d710371664 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy.go @@ -216,6 +216,16 @@ func ResourceBigtableGCPolicy() *schema.Resource { in a replicated instance. Possible values are: "ABANDON".`, ValidateFunc: validation.StringInSlice([]string{"ABANDON", ""}, false), }, + + "ignore_warnings": { + Type: schema.TypeBool, + Optional: true, + Description: `Allows ignoring warnings when updating the GC policy. This can be used + to increase the gc policy on replicated clusters. Doing this may make clusters be + inconsistent for a longer period of time, before using this make sure you understand + the risks listed at https://cloud.google.com/bigtable/docs/garbage-collection#increasing`, + Default: false, + }, }, UseJSONNumber: true, } @@ -253,9 +263,14 @@ func resourceBigtableGCPolicyUpsert(d *schema.ResourceData, meta interface{}) er tableName := d.Get("table").(string) columnFamily := d.Get("column_family").(string) + ignoreWarnings := d.Get("ignore_warnings").(bool) + updateOpts := []bigtable.GCPolicyOption{} + if ignoreWarnings { + updateOpts = append(updateOpts, bigtable.IgnoreWarnings()) + } retryFunc := func() error { - reqErr := c.SetGCPolicy(ctx, tableName, columnFamily, gcPolicy) + reqErr := c.SetGCPolicyWithOptions(ctx, tableName, columnFamily, gcPolicy, updateOpts...) return reqErr } // The default create timeout is 20 minutes. 
diff --git a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy_test.go b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy_test.go index 28a834acd86a..6176e4bbd64f 100644 --- a/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy_test.go +++ b/mmv1/third_party/terraform/services/bigtable/resource_bigtable_gc_policy_test.go @@ -39,6 +39,43 @@ func TestAccBigtableGCPolicy_basic(t *testing.T) { }) } +func TestAccBigtableGCPolicy_ignoreWarnings(t *testing.T) { + // bigtable instance does not use the shared HTTP client, this test creates an instance + acctest.SkipIfVcr(t) + t.Parallel() + + instanceName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + tableName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + familyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + cluster1Name := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + cluster2Name := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + gcRulesOriginal := `{"rules":[{"max_age":"10h"}]}` + gcRulesNew := `{"rules":[{"max_age":"12h"}]}` + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckBigtableGCPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigtableGCPolicyIgnoreWarning(instanceName, tableName, familyName, cluster1Name, cluster2Name, gcRulesOriginal, false), + Check: resource.ComposeTestCheckFunc( + testAccBigtableGCPolicyExists(t, "google_bigtable_gc_policy.policy", true), + resource.TestCheckResourceAttr("google_bigtable_gc_policy.policy", "gc_rules", gcRulesOriginal), + ), + }, + { + Config: testAccBigtableGCPolicyIgnoreWarning(instanceName, tableName, familyName, cluster1Name, cluster2Name, gcRulesNew, true), + Check: resource.ComposeTestCheckFunc( + testAccBigtableGCPolicyExists(t, "google_bigtable_gc_policy.policy", true), + 
resource.TestCheckResourceAttr("google_bigtable_gc_policy.policy", "gc_rules", gcRulesNew), + ), + }, + }, + }) +} + func TestAccBigtableGCPolicy_abandoned(t *testing.T) { // bigtable instance does not use the shared HTTP client, this test creates an instance acctest.SkipIfVcr(t) @@ -563,6 +600,49 @@ resource "google_bigtable_gc_policy" "policy" { `, instanceName, instanceName, tableName, family, family) } +func testAccBigtableGCPolicyIgnoreWarning(instanceName, tableName, family string, cluster1 string, cluster2 string, gcRule string, ignoreWarnings bool) string { + return fmt.Sprintf(` +resource "google_bigtable_instance" "instance" { + name = "%s" + + cluster { + cluster_id = "%s" + num_nodes = 1 + zone = "us-central1-b" + } + + cluster { + cluster_id = "%s" + num_nodes = 1 + zone = "us-central1-c" + } + + deletion_protection = false +} + +resource "google_bigtable_table" "table" { + name = "%s" + instance_name = google_bigtable_instance.instance.id + + column_family { + family = "%s" + } +} + +resource "google_bigtable_gc_policy" "policy" { + instance_name = google_bigtable_instance.instance.id + table = google_bigtable_table.table.name + column_family = "%s" + gc_rules = < 0 && sa[0] != nil { + if !isEmptyServiceAccountBlock(d) && len(sa) > 0 && sa[0] != nil { saMap := sa[0].(map[string]interface{}) req.Email = saMap["email"].(string) req.Scopes = tpgresource.CanonicalizeServiceScopes(tpgresource.ConvertStringSet(saMap["scopes"].(*schema.Set))) @@ -3093,6 +3093,11 @@ func serviceAccountDiffSuppress(k, old, new string, d *schema.ResourceData) bool // suppress changes between { } and {scopes:[]} if l[0] != nil { contents := l[0].(map[string]interface{}) + email := contents["email"] + if email != "" { + // if email is non empty, don't suppress the diff + return false + } if scopes, ok := contents["scopes"]; ok { a := scopes.(*schema.Set).List() if a != nil && len(a) > 0 { @@ -3102,3 +3107,43 @@ func serviceAccountDiffSuppress(k, old, new string, d 
*schema.ResourceData) bool } return true } + +// isEmptyServiceAccountBlock is used to work around an issue when updating +// service accounts. Creating the instance with some scopes but without +// specifying a service account email, assigns default compute service account +// to the instance: +// +// service_account { +// scopes = ["some-scope"] +// } +// +// Then when updating the instance with empty service account: +// +// service_account { +// scopes = [] +// } +// +// the default Terraform behavior is to clear scopes without clearing the +// email. The email was previously computed to be the default service account +// and has not been modified, so the default plan is to leave it unchanged. +// However, when creating a new instance: +// +// service_account { +// scopes = [] +// } +// +// indicates an instance without any service account set. +// isEmptyServiceAccountBlock is used to detect empty service_account block +// and if it is, it is interpreted as no service account and no scopes. 
+// +func isEmptyServiceAccountBlock(d *schema.ResourceData) bool { + serviceAccountsConfig := d.GetRawConfig().GetAttr("service_account") + if serviceAccountsConfig.IsNull() || len(serviceAccountsConfig.AsValueSlice()) == 0 { + return true + } + serviceAccount := serviceAccountsConfig.AsValueSlice()[0] + if serviceAccount.GetAttr("email").IsNull() && len(serviceAccount.GetAttr("scopes").AsValueSlice()) == 0 { + return true + } + return false +} diff --git a/mmv1/third_party/terraform/services/compute/go/resource_compute_instance_test.go.tmpl b/mmv1/third_party/terraform/services/compute/go/resource_compute_instance_test.go.tmpl index 3a9d5d2bc89b..27e65e9281ec 100644 --- a/mmv1/third_party/terraform/services/compute/go/resource_compute_instance_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/go/resource_compute_instance_test.go.tmpl @@ -1109,6 +1109,54 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { }) } +func TestAccComputeInstance_noServiceAccount(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_noServiceAccount(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + }, + }) +} + +func TestAccComputeInstance_serviceAccountEmail_0scopes(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_serviceAccountEmail_0scopes(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + }, + }) +} + func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { t.Parallel() @@ -1125,6 +1173,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1134,6 +1183,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1143,6 +1193,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1152,6 +1203,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, 
"\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 3), ), }, @@ -1176,6 +1228,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1185,6 +1238,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 1), ), }, @@ -1194,6 +1248,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -4083,6 +4138,30 @@ func testAccCheckComputeInstanceServiceAccount(instance *compute.Instance, scope } } +func testAccCheckComputeInstanceNoServiceAccount(instance *compute.Instance) resource.TestCheckFunc { + return func(s *terraform.State) error { + if count := len(instance.ServiceAccounts); count != 0 { + return fmt.Errorf("Wrong number of ServiceAccounts: expected 0, got %d", count) + } + return nil + } +} + +func testAccCheckComputeInstanceMatchServiceAccount(instance *compute.Instance, serviceAcctRegexp string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if count := len(instance.ServiceAccounts); count != 1 { + return fmt.Errorf("Wrong number of ServiceAccounts: expected 1, got %d", count) + } + + email := instance.ServiceAccounts[0].Email + if 
!regexp.MustCompile(serviceAcctRegexp).MatchString(email) { + return fmt.Errorf("ServiceAccount email didn't match:\"%s\", got \"%s\"", serviceAcctRegexp, email) + } + + return nil + } +} + func testAccCheckComputeInstanceScopes(instance *compute.Instance, scopeCount int) resource.TestCheckFunc { return func(s *terraform.State) error { @@ -6115,6 +6194,70 @@ resource "google_compute_instance" "foobar" { `, instance) } +func testAccComputeInstance_noServiceAccount(instance string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + service_account { + scopes = [] + } +} +`, instance) +} + +func testAccComputeInstance_serviceAccountEmail_0scopes(instance string) string { + return fmt.Sprintf(` +data "google_project" "project" {} + +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + service_account { + email = data.google_compute_default_service_account.default.email + scopes = [] + } +} + +data "google_compute_default_service_account" "default" { +} +`, instance) +} + func testAccComputeInstance_serviceAccount_update0(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { diff --git a/mmv1/third_party/terraform/services/compute/go/resource_compute_target_https_proxy_test.go.tmpl b/mmv1/third_party/terraform/services/compute/go/resource_compute_target_https_proxy_test.go.tmpl index 
b66d344c9d48..7fc12c1b57e9 100644 --- a/mmv1/third_party/terraform/services/compute/go/resource_compute_target_https_proxy_test.go.tmpl +++ b/mmv1/third_party/terraform/services/compute/go/resource_compute_target_https_proxy_test.go.tmpl @@ -225,6 +225,7 @@ resource "google_compute_target_https_proxy" "foobar" { google_compute_ssl_certificate.foobar2.self_link, ] quic_override = "ENABLE" + tls_early_data = "STRICT" } resource "google_compute_backend_service" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/go/resource_compute_url_map_test.go.tmpl b/mmv1/third_party/terraform/services/compute/go/resource_compute_url_map_test.go.tmpl new file mode 100644 index 000000000000..ce955dca24d1 --- /dev/null +++ b/mmv1/third_party/terraform/services/compute/go/resource_compute_url_map_test.go.tmpl @@ -0,0 +1,1912 @@ +package compute_test + +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccComputeUrlMap_update_path_matcher(t *testing.T) { + t.Parallel() + + bsName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + hcName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + umName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_basic1(bsName, hcName, umName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + + { + Config: testAccComputeUrlMap_basic2(bsName, hcName, umName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + 
), + }, + }, + }) +} + +func TestAccComputeUrlMap_advanced(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_advanced1(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + + { + Config: testAccComputeUrlMap_advanced2(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + }, + }) +} + +func TestAccComputeUrlMap_defaultRouteActionPathUrlRewrite(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_defaultRouteActionPathUrlRewrite(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + { + Config: testAccComputeUrlMap_defaultRouteActionPathUrlRewrite_update(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + }, + }) +} + +func TestAccComputeUrlMap_defaultRouteActionUrlRewrite(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_defaultRouteActionUrlRewrite(acctest.RandString(t, 10)), + Check: 
resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + + { + Config: testAccComputeUrlMap_defaultRouteActionUrlRewrite_update(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + }, + }) +} + +func TestAccComputeUrlMap_noPathRulesWithUpdate(t *testing.T) { + t.Parallel() + + bsName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + hcName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + umName := fmt.Sprintf("urlmap-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_noPathRules(bsName, hcName, umName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + { + Config: testAccComputeUrlMap_basic1(bsName, hcName, umName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeUrlMapExists( + t, "google_compute_url_map.foobar"), + ), + }, + }, + }) +} + +func testAccCheckComputeUrlMapExists(t *testing.T, n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := acctest.GoogleProviderConfig(t) + name := rs.Primary.Attributes["name"] + + found, err := config.NewComputeClient(config.UserAgent).UrlMaps.Get( + config.Project, name).Do() + if err != nil { + return err + } + + if found.Name != name { + return fmt.Errorf("Url map not found") + } + return nil + } +} + +func TestAccComputeUrlMap_defaultRouteActionTrafficDirectorPathUpdate(t *testing.T) { + 
t.Parallel() + + randString := acctest.RandString(t, 10) + + bsName := fmt.Sprintf("urlmap-test-%s", randString) + hcName := fmt.Sprintf("urlmap-test-%s", randString) + umName := fmt.Sprintf("urlmap-test-%s", randString) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_defaultRouteActionTrafficDirectorPath(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeUrlMap_defaultRouteActionTrafficDirectorPathUpdate(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeUrlMap_defaultRouteActionTrafficDirectorUpdate(t *testing.T) { + t.Parallel() + + randString := acctest.RandString(t, 10) + + bsName := fmt.Sprintf("urlmap-test-%s", randString) + hcName := fmt.Sprintf("urlmap-test-%s", randString) + umName := fmt.Sprintf("urlmap-test-%s", randString) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_defaultRouteActionTrafficDirector(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeUrlMap_defaultRouteActionTrafficDirectorUpdate(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeUrlMap_trafficDirectorUpdate(t *testing.T) { + t.Parallel() + + randString := 
acctest.RandString(t, 10) + + bsName := fmt.Sprintf("urlmap-test-%s", randString) + hcName := fmt.Sprintf("urlmap-test-%s", randString) + umName := fmt.Sprintf("urlmap-test-%s", randString) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_trafficDirector(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeUrlMap_trafficDirectorUpdate(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeUrlMap_trafficDirectorPathUpdate(t *testing.T) { + t.Parallel() + + randString := acctest.RandString(t, 10) + + bsName := fmt.Sprintf("urlmap-test-%s", randString) + hcName := fmt.Sprintf("urlmap-test-%s", randString) + umName := fmt.Sprintf("urlmap-test-%s", randString) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_trafficDirectorPath(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeUrlMap_trafficDirectorPathUpdate(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeUrlMap_trafficDirectorRemoveRouteRule(t *testing.T) { + t.Parallel() + + randString := acctest.RandString(t, 10) + + bsName := fmt.Sprintf("urlmap-test-%s", randString) + hcName := 
fmt.Sprintf("urlmap-test-%s", randString) + umName := fmt.Sprintf("urlmap-test-%s", randString) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_trafficDirector(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccComputeUrlMap_trafficDirectorRemoveRouteRule(bsName, hcName, umName), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccComputeUrlMap_defaultUrlRedirect(t *testing.T) { + t.Parallel() + + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_defaultUrlRedirectConfig(randomSuffix), + }, + { + ResourceName: "google_compute_url_map.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_urlMapCustomErrorResponsePolicy(context), + }, + { + ResourceName: "google_compute_url_map.urlmap", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"default_service"}, + }, + { + Config: 
testAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(context), + }, + { + ResourceName: "google_compute_url_map.urlmap", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"default_service"}, + }, + }, + }) +} +{{- end }} + +func testAccComputeUrlMap_basic1(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "boop" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "boop" + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.foobar.self_link + } + } + + test { + host = "mysite.com" + path = "/*" + service = google_compute_backend_service.foobar.self_link + } +} +`, bsName, hcName, umName) +} + +func testAccComputeUrlMap_basic2(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "blip" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blip" + + path_rule { + paths = 
["/*", "/home"] + service = google_compute_backend_service.foobar.self_link + } + } + + test { + host = "mysite.com" + path = "/test" + service = google_compute_backend_service.foobar.self_link + } +} +`, bsName, hcName, umName) +} + +func testAccComputeUrlMap_advanced1(suffix string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "blop" + } + + host_rule { + hosts = ["myfavoritesite.com"] + path_matcher = "blip" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blop" + + path_rule { + paths = ["/*", "/home"] + service = google_compute_backend_service.foobar.self_link + } + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blip" + + path_rule { + paths = ["/*", "/home"] + service = google_compute_backend_service.foobar.self_link + } + } +} +`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_advanced2(suffix string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "blep" + 
} + + host_rule { + hosts = ["myfavoritesite.com"] + path_matcher = "blip" + } + + host_rule { + hosts = ["myleastfavoritesite.com"] + path_matcher = "blub" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blep" + + path_rule { + paths = ["/home"] + service = google_compute_backend_service.foobar.self_link + } + + path_rule { + paths = ["/login"] + service = google_compute_backend_service.foobar.self_link + } + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blub" + + path_rule { + paths = ["/*", "/blub"] + service = google_compute_backend_service.foobar.self_link + } + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blip" + + path_rule { + paths = ["/*", "/home"] + service = google_compute_backend_service.foobar.self_link + } + } +} +`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_defaultRouteActionPathUrlRewrite(suffix string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "blep" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blep" + + path_rule { + paths = ["/home"] + service = google_compute_backend_service.foobar.self_link + } + + path_rule { + paths = ["/login"] + service = google_compute_backend_service.foobar.self_link + } + + default_route_action { + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + } + } +} 
+`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_defaultRouteActionPathUrlRewrite_update(suffix string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "blep" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "blep" + + path_rule { + paths = ["/home"] + service = google_compute_backend_service.foobar.self_link + } + + path_rule { + paths = ["/login"] + service = google_compute_backend_service.foobar.self_link + } + + default_route_action { + url_rewrite { + host_rewrite = "stage.example.com" # updated + path_prefix_rewrite = "/v2/api/" # updated + } + } + } +} +`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_defaultRouteActionUrlRewrite(suffix string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + default_route_action { + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + } +} +`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_defaultRouteActionUrlRewrite_update(suffix string) string { + return fmt.Sprintf(` +resource 
"google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + default_route_action { + url_rewrite { + host_rewrite = "stage.example.com" # updated + path_prefix_rewrite = "/v2/api/" # updated + } + } +} +`, suffix, suffix, suffix) +} + +func testAccComputeUrlMap_noPathRules(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foobar" { + name = "urlmap-test-%s" + health_checks = [google_compute_http_health_check.zero.self_link] +} + +resource "google_compute_http_health_check" "zero" { + name = "urlmap-test-%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_service = google_compute_backend_service.foobar.self_link + + host_rule { + hosts = ["mysite.com", "myothersite.com"] + path_matcher = "boop" + } + + path_matcher { + default_service = google_compute_backend_service.foobar.self_link + name = "boop" + } + + test { + host = "mysite.com" + path = "/*" + service = google_compute_backend_service.foobar.self_link + } +} +`, bsName, hcName, umName) +} + +func testAccComputeUrlMap_trafficDirector(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = "${google_compute_backend_service.home.self_link}" + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = "${google_compute_backend_service.home.self_link}" + + route_rules { + priority = 1 + header_action { 
+ request_headers_to_remove = ["RemoveMe2"] + request_headers_to_add { + header_name = "AddSomethingElse" + header_value = "MyOtherValue" + replace = true + } + response_headers_to_remove = ["RemoveMe3"] + response_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = false + } + } + match_rules { + full_path_match = "a full path" + header_matches { + header_name = "someheader" + exact_match = "match this exactly" + invert_match = true + } + ignore_case = true + metadata_filters { + filter_match_criteria = "MATCH_ANY" + filter_labels { + name = "PLANET" + value = "MARS" + } + } + query_parameter_matches { + name = "a query parameter" + present_match = true + } + } + url_redirect { + host_redirect = "A host" + https_redirect = false + path_redirect = "some/path" + redirect_response_code = "TEMPORARY_REDIRECT" + strip_query = true + } + } + } + + test { + service = "${google_compute_backend_service.home.self_link}" + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} + +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_trafficDirectorUpdate(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = "${google_compute_backend_service.home2.self_link}" + + host_rule { + hosts = ["mysite.com"] + path_matcher = 
"allpaths2" + } + + path_matcher { + name = "allpaths2" + default_service = "${google_compute_backend_service.home2.self_link}" + + route_rules { + priority = 2 + header_action { + request_headers_to_remove = ["RemoveMe2", "AndMe"] + request_headers_to_add { + header_name = "AddSomethingElseUpdated" + header_value = "MyOtherValueUpdated" + replace = false + } + response_headers_to_remove = ["RemoveMe3", "AndMe4"] + } + match_rules { + full_path_match = "a full path to match" + header_matches { + header_name = "someheaderfoo" + exact_match = "match this exactly again" + invert_match = false + } + ignore_case = false + metadata_filters { + filter_match_criteria = "MATCH_ALL" + filter_labels { + name = "PLANET" + value = "EARTH" + } + } + } + url_redirect { + host_redirect = "A host again" + https_redirect = true + path_redirect = "some/path/twice" + redirect_response_code = "TEMPORARY_REDIRECT" + strip_query = false + } + } + } + + test { + service = "${google_compute_backend_service.home.self_link}" + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_trafficDirectorRemoveRouteRule(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = 
"${google_compute_backend_service.home2.self_link}" + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths2" + } + + path_matcher { + name = "allpaths2" + default_service = "${google_compute_backend_service.home2.self_link}" + } + + test { + service = "${google_compute_backend_service.home.self_link}" + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_trafficDirectorPath(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = "${google_compute_backend_service.home.self_link}" + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + default_service = "${google_compute_backend_service.home.self_link}" + + path_rule { + paths = ["/home"] + route_action { + cors_policy { + allow_credentials = true + allow_headers = ["Allowed content"] + allow_methods = ["GET"] + allow_origin_regexes = ["abc.*"] + allow_origins = ["Allowed origin"] + expose_headers = ["Exposed header"] + max_age = 30 + disabled = true + } + fault_injection_policy { + abort { + http_status = 234 + percentage = 5.6 + } + delay { + fixed_delay { + seconds = 0 + nanos = 50000 + } + percentage = 7.8 + } + } + request_mirror_policy { + backend_service = 
"${google_compute_backend_service.home.self_link}" + } + retry_policy { + num_retries = 4 + per_try_timeout { + seconds = 30 + } + retry_conditions = ["5xx", "deadline-exceeded"] + } + timeout { + seconds = 20 + nanos = 750000000 + } + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + weighted_backend_services { + backend_service = "${google_compute_backend_service.home.self_link}" + weight = 400 + header_action { + request_headers_to_remove = ["RemoveMe"] + request_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = true + } + response_headers_to_remove = ["RemoveMe"] + response_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = false + } + } + } + } + } + } + + test { + service = "${google_compute_backend_service.home.self_link}" + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} + +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_trafficDirectorPathUpdate(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = "${google_compute_backend_service.home2.self_link}" + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths2" + } + + path_matcher { + name = "allpaths2" + default_service = 
"${google_compute_backend_service.home.self_link}" + + path_rule { + paths = ["/homeupdated"] + route_action { + cors_policy { + allow_credentials = false + allow_headers = ["Allowed content updated"] + allow_methods = ["PUT"] + allow_origin_regexes = ["abcdef.*"] + allow_origins = ["Allowed origin updated"] + expose_headers = ["Exposed header updated"] + max_age = 31 + disabled = false + } + fault_injection_policy { + abort { + http_status = 235 + percentage = 6.7 + } + delay { + fixed_delay { + seconds = 1 + nanos = 40000 + } + percentage = 8.9 + } + } + request_mirror_policy { + backend_service = "${google_compute_backend_service.home.self_link}" + } + retry_policy { + num_retries = 5 + per_try_timeout { + seconds = 31 + } + retry_conditions = ["5xx"] + } + timeout { + seconds = 21 + nanos = 760000000 + } + url_rewrite { + host_rewrite = "stage.example.com" # updated + path_prefix_rewrite = "/v2/api/" # updated + } + weighted_backend_services { + backend_service = "${google_compute_backend_service.home.self_link}" + weight = 400 + header_action { + request_headers_to_remove = ["RemoveMeUpdated"] + request_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = false + } + response_headers_to_remove = ["RemoveMeUpdated"] + response_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = true + } + } + } + } + } + } + + test { + service = "${google_compute_backend_service.home.self_link}" + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = ["${google_compute_health_check.default.self_link}"] + 
load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_defaultRouteActionTrafficDirectorPath(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = google_compute_backend_service.home.self_link + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths" + } + + path_matcher { + name = "allpaths" + + default_route_action { + cors_policy { + allow_credentials = true + allow_headers = ["Allowed content"] + allow_methods = ["GET"] + allow_origin_regexes = ["abc.*"] + allow_origins = ["Allowed origin"] + expose_headers = ["Exposed header"] + max_age = 30 + disabled = true + } + fault_injection_policy { + abort { + http_status = 234 + percentage = 5.6 + } + delay { + fixed_delay { + seconds = 0 + nanos = 50000 + } + percentage = 7.8 + } + } + request_mirror_policy { + backend_service = google_compute_backend_service.home.self_link + } + retry_policy { + num_retries = 4 + per_try_timeout { + seconds = 30 + } + retry_conditions = ["5xx", "deadline-exceeded"] + } + timeout { + seconds = 20 + nanos = 750000000 + } + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + weighted_backend_services { + backend_service = google_compute_backend_service.home.self_link + weight = 400 + header_action { + request_headers_to_remove = ["RemoveMe"] + request_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = true + } + response_headers_to_remove = ["RemoveMe"] + response_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = false + } + } + } + } + } + + test { + service = google_compute_backend_service.home.self_link + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" 
"home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} + +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_defaultRouteActionTrafficDirectorPathUpdate(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + default_service = google_compute_backend_service.home2.self_link + + host_rule { + hosts = ["mysite.com"] + path_matcher = "allpaths2" + } + + path_matcher { + name = "allpaths2" + + default_route_action { + cors_policy { + allow_credentials = false + allow_headers = ["Allowed content updated"] + allow_methods = ["PUT"] + allow_origin_regexes = ["abcdef.*"] + allow_origins = ["Allowed origin updated"] + expose_headers = ["Exposed header updated"] + max_age = 31 + disabled = false + } + fault_injection_policy { + abort { + http_status = 235 + percentage = 6.7 + } + delay { + fixed_delay { + seconds = 1 + nanos = 40000 + } + percentage = 8.9 + } + } + request_mirror_policy { + backend_service = google_compute_backend_service.home.self_link + } + retry_policy { + num_retries = 5 + per_try_timeout { + seconds = 31 + } + retry_conditions = ["5xx"] + } + timeout { + seconds = 21 + nanos = 760000000 + } + url_rewrite { + host_rewrite = "stage.example.com" # updated + path_prefix_rewrite = "/v2/api/" # updated + } + weighted_backend_services { + backend_service = google_compute_backend_service.home.self_link + weight = 400 + header_action { + 
request_headers_to_remove = ["RemoveMeUpdated"] + request_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = false + } + response_headers_to_remove = ["RemoveMeUpdated"] + response_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = true + } + } + } + } + } + + test { + service = google_compute_backend_service.home.self_link + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} +`, umName, bsName, bsName, hcName) +} + + +func testAccComputeUrlMap_defaultRouteActionTrafficDirector(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + + default_route_action { + cors_policy { + allow_credentials = true + allow_headers = ["Allowed content"] + allow_methods = ["GET"] + allow_origin_regexes = ["abc.*"] + allow_origins = ["Allowed origin"] + expose_headers = ["Exposed header"] + max_age = 30 + disabled = true + } + fault_injection_policy { + abort { + http_status = 234 + percentage = 5.6 + } + delay { + fixed_delay { + seconds = 0 + nanos = 50000 + } + percentage = 7.8 + } + } + request_mirror_policy { + backend_service = google_compute_backend_service.home.self_link + } + retry_policy { + num_retries = 4 + per_try_timeout { + seconds = 30 + } + retry_conditions = ["5xx", "deadline-exceeded"] + } + 
timeout { + seconds = 20 + nanos = 750000000 + } + url_rewrite { + host_rewrite = "dev.example.com" + path_prefix_rewrite = "/v1/api/" + } + weighted_backend_services { + backend_service = google_compute_backend_service.home.self_link + weight = 400 + header_action { + request_headers_to_remove = ["RemoveMe"] + request_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = true + } + response_headers_to_remove = ["RemoveMe"] + response_headers_to_add { + header_name = "AddMe" + header_value = "MyValue" + replace = false + } + } + } + } + + test { + service = google_compute_backend_service.home.self_link + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} + +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_defaultRouteActionTrafficDirectorUpdate(bsName, hcName, umName string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "%s" + description = "a description" + + default_route_action { + cors_policy { + allow_credentials = false + allow_headers = ["Allowed content updated"] + allow_methods = ["PUT"] + allow_origin_regexes = ["abcdef.*"] + allow_origins = ["Allowed origin updated"] + expose_headers = ["Exposed header updated"] + max_age = 31 + disabled = false + } + fault_injection_policy { + abort { + http_status = 235 + percentage = 6.7 + } + delay { + fixed_delay { + seconds = 1 + nanos = 40000 
+ } + percentage = 8.9 + } + } + request_mirror_policy { + backend_service = google_compute_backend_service.home2.self_link + } + retry_policy { + num_retries = 5 + per_try_timeout { + seconds = 31 + } + retry_conditions = ["5xx"] + } + timeout { + seconds = 21 + nanos = 760000000 + } + url_rewrite { + host_rewrite = "stage.example.com" # updated + path_prefix_rewrite = "/v2/api/" # updated + } + weighted_backend_services { + backend_service = google_compute_backend_service.home2.self_link + weight = 400 + header_action { + request_headers_to_remove = ["RemoveMeUpdated"] + request_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = false + } + response_headers_to_remove = ["RemoveMeUpdated"] + response_headers_to_add { + header_name = "AddMeUpdated" + header_value = "MyValueUpdated" + replace = true + } + } + } + } + + test { + service = google_compute_backend_service.home2.self_link + host = "hi.com" + path = "/home" + } +} + +resource "google_compute_backend_service" "home" { + name = "%s" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_backend_service" "home2" { + name = "%s-2" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + + health_checks = [google_compute_health_check.default.self_link] + load_balancing_scheme = "INTERNAL_SELF_MANAGED" +} + +resource "google_compute_health_check" "default" { + name = "%s" + http_health_check { + port = 80 + } +} +`, umName, bsName, bsName, hcName) +} + +func testAccComputeUrlMap_defaultUrlRedirectConfig(randomSuffix string) string { + return fmt.Sprintf(` +resource "google_compute_url_map" "foobar" { + name = "urlmap-test-%s" + default_url_redirect { + https_redirect = true + strip_query = false + } +} +`, randomSuffix) +} + +{{ if ne $.TargetVersionName `ga` -}} +func 
testAccComputeUrlMap_urlMapCustomErrorResponsePolicy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_url_map" "urlmap" { + provider = google-beta + name = "urlmap%{random_suffix}" + description = "a description" + + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 5xx responses will be catched + path = "/*" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx", "5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/login" + override_response_code = 404 + } + error_response_rule { + match_response_codes = ["503"] # Only a 503 response will be catched on path example + path = "/example" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx"] + path = "/register" + override_response_code = 401 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = google-beta + name = "login%{random_suffix}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "tf-test-health-check%{random_suffix}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource 
"google_compute_backend_bucket" "error" { + provider = google-beta + name = "tf-test-error-backend-bucket%{random_suffix}" + bucket_name = google_storage_bucket.error.name + enable_cdn = true +} + +resource "google_storage_bucket" "error" { + provider = google-beta + name = "tf-test-static-asset-bucket%{random_suffix}" + location = "US" +} +`, context) +} +{{- end }} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_url_map" "urlmap" { + provider = google-beta + name = "urlmap%{random_suffix}" + description = "a description" + + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx", "4xx"] # All 5xx responses will be catched + path = "/test/*" + override_response_code = 503 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/*" + override_response_code = 502 + } + error_response_rule { + match_response_codes = ["4xx"] # Only a 503 response will be catched on path example + path = "/example/test" + override_response_code = 400 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] + path = "/register/example/*" + override_response_code = 403 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = 
google-beta + name = "login%{random_suffix}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "tf-test-health-check%{random_suffix}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_backend_bucket" "error" { + provider = google-beta + name = "tf-test-error-backend-bucket-2%{random_suffix}" + bucket_name = google_storage_bucket.error.name + enable_cdn = true + + lifecycle { + create_before_destroy = true + } +} + +resource "google_storage_bucket" "error" { + provider = google-beta + name = "tf-test-static-asset-bucket-2%{random_suffix}" + location = "US" +} +`, context) +} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb index 220f8ddf3099..f292b032a776 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance.go.erb @@ -2475,7 +2475,7 @@ func resourceComputeInstanceUpdate(d *schema.ResourceData, meta interface{}) err if d.HasChange("service_account.0.email") || scopesChange { sa := d.Get("service_account").([]interface{}) req := &compute.InstancesSetServiceAccountRequest{ForceSendFields: []string{"email"}} - if len(sa) > 0 && sa[0] != nil { + if !isEmptyServiceAccountBlock(d) && len(sa) > 0 && sa[0] != nil { saMap := sa[0].(map[string]interface{}) req.Email = saMap["email"].(string) req.Scopes = tpgresource.CanonicalizeServiceScopes(tpgresource.ConvertStringSet(saMap["scopes"].(*schema.Set))) @@ -3094,6 +3094,11 @@ func serviceAccountDiffSuppress(k, old, new string, d *schema.ResourceData) bool // suppress changes between { } and {scopes:[]} 
if l[0] != nil { contents := l[0].(map[string]interface{}) + email := contents["email"] + if email != "" { + // if email is non empty, don't suppress the diff + return false + } if scopes, ok := contents["scopes"]; ok { a := scopes.(*schema.Set).List() if a != nil && len(a) > 0 { @@ -3103,3 +3108,43 @@ func serviceAccountDiffSuppress(k, old, new string, d *schema.ResourceData) bool } return true } + +// isEmptyServiceAccountBlock is used to work around an issue when updating +// service accounts. Creating the instance with some scopes but without +// specifying a service account email, assigns default compute service account +// to the instance: +// +// service_account { +// scopes = ["some-scope"] +// } +// +// Then when updating the instance with empty service account: +// +// service_account { +// scopes = [] +// } +// +// the default Terraform behavior is to clear scopes without clearing the +// email. The email was previously computed to be the default service account +// and has not been modified, so the default plan is to leave it unchanged. +// However, when creating a new instance: +// +// service_account { +// scopes = [] +// } +// +// indicates an instance without any service account set. +// isEmptyServiceAccountBlock is used to detect empty service_account block +// and if it is, it is interpreted as no service account and no scopes. 
+// +func isEmptyServiceAccountBlock(d *schema.ResourceData) bool { + serviceAccountsConfig := d.GetRawConfig().GetAttr("service_account") + if serviceAccountsConfig.IsNull() || len(serviceAccountsConfig.AsValueSlice()) == 0 { + return true + } + serviceAccount := serviceAccountsConfig.AsValueSlice()[0] + if serviceAccount.GetAttr("email").IsNull() && len(serviceAccount.GetAttr("scopes").AsValueSlice()) == 0 { + return true + } + return false +} diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.erb index e2eb240b706a..dde0a9397a76 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_instance_test.go.erb @@ -1110,6 +1110,54 @@ func TestAccComputeInstance_serviceAccount(t *testing.T) { }) } +func TestAccComputeInstance_noServiceAccount(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_noServiceAccount(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + }, + }) +} + +func TestAccComputeInstance_serviceAccountEmail_0scopes(t *testing.T) { + t.Parallel() + + var instance compute.Instance + var instanceName = fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckComputeInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeInstance_serviceAccountEmail_0scopes(instanceName), + Check: resource.ComposeTestCheckFunc( + testAccCheckComputeInstanceExists( + t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), + ), + }, + computeInstanceImportStep("us-central1-a", instanceName, []string{}), + }, + }) +} + func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { t.Parallel() @@ -1126,6 +1174,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1135,6 +1184,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1144,6 +1194,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1153,6 +1204,7 @@ func TestAccComputeInstance_serviceAccount_updated(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, 
"\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 3), ), }, @@ -1177,6 +1229,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -1186,6 +1239,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceMatchServiceAccount(&instance, "\\d+-compute@developer.gserviceaccount.com"), testAccCheckComputeInstanceScopes(&instance, 1), ), }, @@ -1195,6 +1249,7 @@ func TestAccComputeInstance_serviceAccount_updated0to1to0scopes(t *testing.T) { Check: resource.ComposeTestCheckFunc( testAccCheckComputeInstanceExists( t, "google_compute_instance.foobar", &instance), + testAccCheckComputeInstanceNoServiceAccount(&instance), testAccCheckComputeInstanceScopes(&instance, 0), ), }, @@ -4084,6 +4139,30 @@ func testAccCheckComputeInstanceServiceAccount(instance *compute.Instance, scope } } +func testAccCheckComputeInstanceNoServiceAccount(instance *compute.Instance) resource.TestCheckFunc { + return func(s *terraform.State) error { + if count := len(instance.ServiceAccounts); count != 0 { + return fmt.Errorf("Wrong number of ServiceAccounts: expected 0, got %d", count) + } + return nil + } +} + +func testAccCheckComputeInstanceMatchServiceAccount(instance *compute.Instance, serviceAcctRegexp string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if count := len(instance.ServiceAccounts); count != 1 { + return fmt.Errorf("Wrong number of ServiceAccounts: expected 1, got %d", count) + } + + email := instance.ServiceAccounts[0].Email + if 
!regexp.MustCompile(serviceAcctRegexp).MatchString(email) { + return fmt.Errorf("ServiceAccount email didn't match:\"%s\", got \"%s\"", serviceAcctRegexp, email) + } + + return nil + } +} + func testAccCheckComputeInstanceScopes(instance *compute.Instance, scopeCount int) resource.TestCheckFunc { return func(s *terraform.State) error { @@ -6116,6 +6195,70 @@ resource "google_compute_instance" "foobar" { `, instance) } +func testAccComputeInstance_noServiceAccount(instance string) string { + return fmt.Sprintf(` +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + service_account { + scopes = [] + } +} +`, instance) +} + +func testAccComputeInstance_serviceAccountEmail_0scopes(instance string) string { + return fmt.Sprintf(` +data "google_project" "project" {} + +data "google_compute_image" "my_image" { + family = "debian-11" + project = "debian-cloud" +} + +resource "google_compute_instance" "foobar" { + name = "%s" + machine_type = "e2-medium" + zone = "us-central1-a" + + boot_disk { + initialize_params { + image = data.google_compute_image.my_image.self_link + } + } + + network_interface { + network = "default" + } + + service_account { + email = data.google_compute_default_service_account.default.email + scopes = [] + } +} + +data "google_compute_default_service_account" "default" { +} +`, instance) +} + func testAccComputeInstance_serviceAccount_update0(instance string) string { return fmt.Sprintf(` data "google_compute_image" "my_image" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go index 
21685e85f191..e28f8d3def33 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go +++ b/mmv1/third_party/terraform/services/compute/resource_compute_shared_reservation_update_test.go @@ -9,7 +9,6 @@ import ( ) func TestAccComputeSharedReservation_update(t *testing.T) { - acctest.SkipIfVcr(t) // large number of parallel resources. t.Parallel() context := map[string]interface{}{ diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_target_https_proxy_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_target_https_proxy_test.go.erb index bb973d853080..6ff35b6b607c 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_target_https_proxy_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_target_https_proxy_test.go.erb @@ -226,6 +226,7 @@ resource "google_compute_target_https_proxy" "foobar" { google_compute_ssl_certificate.foobar2.self_link, ] quic_override = "ENABLE" + tls_early_data = "STRICT" } resource "google_compute_backend_service" "foobar" { diff --git a/mmv1/third_party/terraform/services/compute/resource_compute_url_map_test.go.erb b/mmv1/third_party/terraform/services/compute/resource_compute_url_map_test.go.erb index b5fd8df9f2aa..dfc72c901701 100644 --- a/mmv1/third_party/terraform/services/compute/resource_compute_url_map_test.go.erb +++ b/mmv1/third_party/terraform/services/compute/resource_compute_url_map_test.go.erb @@ -349,7 +349,6 @@ func TestAccComputeUrlMap_defaultUrlRedirect(t *testing.T) { acctest.VcrTest(t, resource.TestCase{ PreCheck: func() { acctest.AccTestPreCheck(t) }, ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), - CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccComputeUrlMap_defaultUrlRedirectConfig(randomSuffix), @@ -363,6 +362,42 @@ func TestAccComputeUrlMap_defaultUrlRedirect(t *testing.T) { }) } +<% unless version == 'ga' 
-%> +func TestAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckComputeUrlMapDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccComputeUrlMap_urlMapCustomErrorResponsePolicy(context), + }, + { + ResourceName: "google_compute_url_map.urlmap", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"default_service"}, + }, + { + Config: testAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(context), + }, + { + ResourceName: "google_compute_url_map.urlmap", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"default_service"}, + }, + }, + }) +} +<% end -%> + func testAccComputeUrlMap_basic1(bsName, hcName, umName string) string { return fmt.Sprintf(` resource "google_compute_backend_service" "foobar" { @@ -1686,3 +1721,193 @@ resource "google_compute_url_map" "foobar" { } `, randomSuffix) } + +<% unless version == 'ga' -%> +func testAccComputeUrlMap_urlMapCustomErrorResponsePolicy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_url_map" "urlmap" { + provider = google-beta + name = "urlmap%{random_suffix}" + description = "a description" + + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 5xx responses will be catched + path = "/*" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + 
default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx", "5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/login" + override_response_code = 404 + } + error_response_rule { + match_response_codes = ["503"] # Only a 503 response will be catched on path example + path = "/example" + override_response_code = 502 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["4xx"] + path = "/register" + override_response_code = 401 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = google-beta + name = "login%{random_suffix}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "tf-test-health-check%{random_suffix}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_backend_bucket" "error" { + provider = google-beta + name = "tf-test-error-backend-bucket%{random_suffix}" + bucket_name = google_storage_bucket.error.name + enable_cdn = true +} + +resource "google_storage_bucket" "error" { + provider = google-beta + name = "tf-test-static-asset-bucket%{random_suffix}" + location = "US" +} +`, context) +} +<% end -%> + +<% unless version == 'ga' -%> +func testAccComputeUrlMap_urlMapCustomErrorResponsePolicyUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_url_map" "urlmap" { + provider = google-beta + name = "urlmap%{random_suffix}" + description = "a description" + + default_service = google_compute_backend_service.example.id + + 
default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx", "4xx"] # All 5xx responses will be catched + path = "/test/*" + override_response_code = 503 + } + error_service = google_compute_backend_bucket.error.id + } + + host_rule { + hosts = ["mysite.com"] + path_matcher = "mysite" + } + + path_matcher { + name = "mysite" + default_service = google_compute_backend_service.example.id + + default_custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] # All 4xx and 5xx responses will be catched on path login + path = "/*" + override_response_code = 502 + } + error_response_rule { + match_response_codes = ["4xx"] # Only a 503 response will be catched on path example + path = "/example/test" + override_response_code = 400 + } + error_service = google_compute_backend_bucket.error.id + } + + path_rule { + paths = ["/*"] + service = google_compute_backend_service.example.id + + custom_error_response_policy { + error_response_rule { + match_response_codes = ["5xx"] + path = "/register/example/*" + override_response_code = 403 + } + error_service = google_compute_backend_bucket.error.id + } + } + } +} + +resource "google_compute_backend_service" "example" { + provider = google-beta + name = "login%{random_suffix}" + port_name = "http" + protocol = "HTTP" + timeout_sec = 10 + load_balancing_scheme = "EXTERNAL_MANAGED" + + health_checks = [google_compute_http_health_check.default.id] +} + +resource "google_compute_http_health_check" "default" { + provider = google-beta + name = "tf-test-health-check%{random_suffix}" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_compute_backend_bucket" "error" { + provider = google-beta + name = "tf-test-error-backend-bucket-2%{random_suffix}" + bucket_name = google_storage_bucket.error.name + enable_cdn = true + + lifecycle { + create_before_destroy = true + } +} + +resource "google_storage_bucket" "error" { + provider = google-beta 
+ name = "tf-test-static-asset-bucket-2%{random_suffix}" + location = "US" +} +`, context) +} +<% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/dialogflowcx/go/resource_dialogflowcx_agent_test.go b/mmv1/third_party/terraform/services/dialogflowcx/go/resource_dialogflowcx_agent_test.go new file mode 100644 index 000000000000..d3b757abfd6f --- /dev/null +++ b/mmv1/third_party/terraform/services/dialogflowcx/go/resource_dialogflowcx_agent_test.go @@ -0,0 +1,138 @@ +package dialogflowcx_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDialogflowCXAgent_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDialogflowCXAgent_basic(context), + }, + { + ResourceName: "google_dialogflow_cx_agent.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token"}, + }, + { + Config: testAccDialogflowCXAgent_full(context), + }, + { + ResourceName: "google_dialogflow_cx_agent.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"git_integration_settings.0.github_settings.0.access_token"}, + }, + { + Config: testAccDialogflowCXAgent_removeSettings(context), + }, + { + ResourceName: "google_dialogflow_cx_agent.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"git_integration_settings.0.github_settings.0.access_token"}, + }, + }, + }) +} + +func testAccDialogflowCXAgent_basic(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dialogflow_cx_agent" "foobar" { + display_name = "tf-test-%{random_suffix}" + location = "global" + default_language_code = "en" + supported_language_codes = ["fr","de","es"] + time_zone = "America/New_York" + description = "Description 1." + avatar_uri = "https://storage.cloud.google.com/dialogflow-test-host-image/cloud-logo.png" + } + `, context) +} + +func testAccDialogflowCXAgent_full(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_storage_bucket" "bucket" { + name = "tf-test-dialogflowcx-bucket%{random_suffix}" + location = "US" + uniform_bucket_level_access = true + } + + resource "google_dialogflow_cx_agent" "foobar" { + display_name = "tf-test-%{random_suffix}update" + location = "global" + default_language_code = "en" + supported_language_codes = ["no"] + time_zone = "Europe/London" + description = "Description 2!" 
+ avatar_uri = "https://storage.cloud.google.com/dialogflow-test-host-image/cloud-logo-2.png" + enable_stackdriver_logging = true + enable_spell_correction = true + speech_to_text_settings { + enable_speech_adaptation = true + } + advanced_settings { + audio_export_gcs_destination { + uri = "${google_storage_bucket.bucket.url}/prefix-" + } + dtmf_settings { + enabled = true + max_digits = 1 + finish_digit = "#" + } + } + git_integration_settings { + github_settings { + display_name = "Github Repo" + repository_uri = "https://api.github.com/repos/githubtraining/hellogitworld" + tracking_branch = "main" + access_token = "secret-token" + branches = ["main"] + } + } + text_to_speech_settings { + synthesize_speech_configs = jsonencode({ + en = { + voice = { + name = "en-US-Neural2-A" + } + } + }) + } + } + `, context) +} + +func testAccDialogflowCXAgent_removeSettings(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_dialogflow_cx_agent" "foobar" { + display_name = "tf-test-%{random_suffix}" + location = "global" + default_language_code = "en" + supported_language_codes = ["fr","de","es"] + time_zone = "America/New_York" + description = "Description 1." 
+ avatar_uri = "https://storage.cloud.google.com/dialogflow-test-host-image/cloud-logo.png" + advanced_settings {} + git_integration_settings {} + text_to_speech_settings {} + } + `, context) +} diff --git a/mmv1/third_party/terraform/services/dns/go/data_source_dns_key_test.go b/mmv1/third_party/terraform/services/dns/go/data_source_dns_key_test.go new file mode 100644 index 000000000000..d05066a2f7bb --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/data_source_dns_key_test.go @@ -0,0 +1,178 @@ +package dns_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceDNSKeys_basic(t *testing.T) { + t.Parallel() + + dnsZoneName := fmt.Sprintf("tf-test-dnskey-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDNSKeysConfig(dnsZoneName, "on"), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceDNSKeysDSRecordCheck("data.google_dns_keys.foo_dns_key"), + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "key_signing_keys.#", "1"), + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "zone_signing_keys.#", "1"), + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "key_signing_keys.#", "1"), + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "zone_signing_keys.#", "1"), + ), + }, + }, + }) +} + + +func TestAccDataSourceDNSKeys_noDnsSec(t *testing.T) { + t.Parallel() + + dnsZoneName := fmt.Sprintf("tf-test-dnskey-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDNSKeysConfig(dnsZoneName, "off"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "key_signing_keys.#", "0"), + resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "zone_signing_keys.#", "0"), + ), + }, + }, + }) +} + +func testAccDataSourceDNSKeysDSRecordCheck(datasourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[datasourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", datasourceName) + } + + if ds.Primary.Attributes["key_signing_keys.0.ds_record"] == "" { + return fmt.Errorf("DS record not found in data source") + } + + return nil + } +} + +func testAccDataSourceDNSKeysConfig(dnsZoneName, dnssecStatus string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foo" { + name = "%s" + dns_name = "dnssec.gcp.tfacc.hashicorptest.com." + + dnssec_config { + state = "%s" + non_existence = "nsec3" + } +} + +data "google_dns_keys" "foo_dns_key" { + managed_zone = google_dns_managed_zone.foo.name +} + +data "google_dns_keys" "foo_dns_key_id" { + managed_zone = google_dns_managed_zone.foo.id +} +`, dnsZoneName, dnssecStatus) +} + +// TestAccDataSourceDNSKeys_basic_AdcAuth is the same as TestAccDataSourceDNSKeys_basic but the test enforces that a developer runs this using +// ADCs, supplied via GOOGLE_APPLICATION_CREDENTIALS. If any other credentials ENVs are set the PreCheck will fail. +// Commented out until this test can run in TeamCity/CI. 
+// func TestAccDataSourceDNSKeys_basic_AdcAuth(t *testing.T) { +// acctest.SkipIfVcr(t) // Uses external providers +// t.Parallel() + +// creds := os.Getenv("GOOGLE_APPLICATION_CREDENTIALS") // PreCheck assertion handles checking this is set + +// dnsZoneName := fmt.Sprintf("tf-test-dnskey-test-%s", acctest.RandString(t, 10)) + +// context := map[string]interface{}{ +// "credentials_path": creds, +// "dns_zone_name": dnsZoneName, +// "dnssec_status": "on", +// } + +// acctest.VcrTest(t, resource.TestCase{ +// PreCheck: func() { acctest.AccTestPreCheck_AdcCredentialsOnly(t) }, // Note different than default +// CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), +// Steps: []resource.TestStep{ +// // Check test fails with version of provider where data source is implemented with PF +// { +// ExternalProviders: map[string]resource.ExternalProvider{ +// "google": { +// VersionConstraint: "4.60.0", // Muxed provider with dns data sources migrated to PF +// Source: "hashicorp/google", +// }, +// }, +// ExpectError: regexp.MustCompile("Post \"https://oauth2.googleapis.com/token\": context canceled"), +// Config: testAccDataSourceDNSKeysConfig_AdcCredentials(context), +// Check: resource.ComposeTestCheckFunc( +// testAccDataSourceDNSKeysDSRecordCheck("data.google_dns_keys.foo_dns_key"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "key_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "zone_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "key_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "zone_signing_keys.#", "1"), +// ), +// }, +// // Test should pass with more recent code +// { +// ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), +// Config: testAccDataSourceDNSKeysConfig_AdcCredentials(context), +// Check: resource.ComposeTestCheckFunc( +// 
testAccDataSourceDNSKeysDSRecordCheck("data.google_dns_keys.foo_dns_key"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "key_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key", "zone_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "key_signing_keys.#", "1"), +// resource.TestCheckResourceAttr("data.google_dns_keys.foo_dns_key_id", "zone_signing_keys.#", "1"), +// ), +// }, +// }, +// }) +// } + +func testAccDataSourceDNSKeysConfig_AdcCredentials(context map[string]interface{}) string { + return acctest.Nprintf(` + +// The auth problem isn't triggered unless provider block is +// present in the test config. + +provider "google" { + credentials = "%{credentials_path}" +} + +resource "google_dns_managed_zone" "foo" { + name = "%{dns_zone_name}" + dns_name = "dnssec.gcp.tfacc.hashicorptest.com." + + dnssec_config { + state = "%{dnssec_status}" + non_existence = "nsec3" + } +} + +data "google_dns_keys" "foo_dns_key" { + managed_zone = google_dns_managed_zone.foo.name +} + +data "google_dns_keys" "foo_dns_key_id" { + managed_zone = google_dns_managed_zone.foo.id +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/dns/go/data_source_dns_managed_zone_test.go.tmpl b/mmv1/third_party/terraform/services/dns/go/data_source_dns_managed_zone_test.go.tmpl new file mode 100644 index 000000000000..f3b7ed151874 --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/data_source_dns_managed_zone_test.go.tmpl @@ -0,0 +1,59 @@ +package dns_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDataSourceDnsManagedZone_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + 
CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDnsManagedZone_basic(acctest.RandString(t, 10)), + Check: acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + "data.google_dns_managed_zone.qa", + "google_dns_managed_zone.foo", + map[string]struct{}{ + "dnssec_config.#": {}, + "private_visibility_config.#": {}, + "peering_config.#": {}, + "forwarding_config.#": {}, + "force_destroy": {}, + "labels.#": {}, + "terraform_labels.%": {}, + "effective_labels.%": {}, + "creation_time": {}, + "cloud_logging_config.#": {}, + "cloud_logging_config.0.%": {}, + "cloud_logging_config.0.enable_logging": {}, +{{- if ne $.TargetVersionName "ga" }} + "reverse_lookup": {}, +{{- end }} + }, + ), + }, + }, + }) +} + +func testAccDataSourceDnsManagedZone_basic(managedZoneName string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foo" { + name = "tf-test-qa-zone-%s" + dns_name = "qa.gcp.tfacc.hashicorptest.com." 
+ description = "QA DNS zone" +} + +data "google_dns_managed_zone" "qa" { + name = google_dns_managed_zone.foo.name +} +`, managedZoneName) +} diff --git a/mmv1/third_party/terraform/services/dns/go/data_source_dns_record_set_test.go b/mmv1/third_party/terraform/services/dns/go/data_source_dns_record_set_test.go new file mode 100644 index 000000000000..1169033b5cd1 --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/data_source_dns_record_set_test.go @@ -0,0 +1,54 @@ +package dns_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAcccDataSourceDnsRecordSet_basic(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceDnsRecordSet_basic(name), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_dns_record_set.rs", "google_dns_record_set.rs"), + ), + }, + }, + }) +} + +func testAccDataSourceDnsRecordSet_basic(randString string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "zone" { + name = "tf-test-zone-%s" + dns_name = "%s.hashicorptest.com." 
+} + +resource "google_dns_record_set" "rs" { + managed_zone = google_dns_managed_zone.zone.name + name = "%s.${google_dns_managed_zone.zone.dns_name}" + type = "A" + ttl = 300 + rrdatas = [ + "192.168.1.0", + ] +} + +data "google_dns_record_set" "rs" { + managed_zone = google_dns_record_set.rs.managed_zone + name = google_dns_record_set.rs.name + type = google_dns_record_set.rs.type +} +`, randString, randString, randString) +} diff --git a/mmv1/third_party/terraform/services/dns/go/resource_dns_managed_zone_test.go.tmpl b/mmv1/third_party/terraform/services/dns/go/resource_dns_managed_zone_test.go.tmpl new file mode 100644 index 000000000000..d95ab34faaad --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/resource_dns_managed_zone_test.go.tmpl @@ -0,0 +1,663 @@ +package dns_test + +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + tpgdns "github.com/hashicorp/terraform-provider-google/google/services/dns" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "google.golang.org/api/dns/v1" +) + +func TestAccDNSManagedZone_update(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_basic(zoneSuffix, "description1", map[string]string{"foo": "bar", "ping": "pong"}), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", 
"terraform_labels"}, + }, + { + Config: testAccDnsManagedZone_basic(zoneSuffix, "description2", map[string]string{"foo": "bar"}), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccDNSManagedZone_privateUpdate(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_privateUpdate(zoneSuffix, "network-1", "network-2"), + }, + { + ResourceName: "google_dns_managed_zone.private", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsManagedZone_privateUpdate(zoneSuffix, "network-2", "network-3"), + }, + { + ResourceName: "google_dns_managed_zone.private", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSManagedZone_dnssec_update(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_dnssec_on(zoneSuffix), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsManagedZone_dnssec_off(zoneSuffix), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSManagedZone_dnssec_empty(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + 
PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_dnssec_empty(zoneSuffix), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSManagedZone_privateForwardingUpdate(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_privateForwardingUpdate(zoneSuffix, "172.16.1.10", "172.16.1.20", "default", "private"), + }, + { + ResourceName: "google_dns_managed_zone.private", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsManagedZone_privateForwardingUpdate(zoneSuffix, "172.16.1.10", "192.168.1.1", "private", "default"), + }, + { + ResourceName: "google_dns_managed_zone.private", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSManagedZone_cloudLoggingConfigUpdate(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_cloudLoggingConfig_basic(zoneSuffix), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccDnsManagedZone_cloudLoggingConfig_update(zoneSuffix, true), + }, + { + ResourceName: 
"google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccDnsManagedZone_cloudLoggingConfig_update(zoneSuffix, false), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +{{ if or (ne $.TargetVersionName ``) (eq $.TargetVersionName `ga`) }} +func TestAccDNSManagedZone_reverseLookup(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_reverseLookup(zoneSuffix), + }, + { + ResourceName: "google_dns_managed_zone.reverse", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func TestAccDNSManagedZone_forceDestroy(t *testing.T) { + //t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + project := envvar.GetTestProjectFromEnv() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDNSManagedZone_forceDestroy(zoneSuffix), + Check: resource.ComposeTestCheckFunc( + testAccCheckManagedZoneCreateRRs(t, zoneSuffix, project), + ), + }, + }, + }) +} + +func testAccCheckManagedZoneCreateRRs(t *testing.T, zoneSuffix string, project string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + zone := fmt.Sprintf("mzone-test-%s", zoneSuffix) + // Build the change + chg := &dns.Change{ + Additions: 
[]*dns.ResourceRecordSet{ + { + Name: fmt.Sprintf("cname.%s.hashicorptest.com.", zoneSuffix), + Type: "CNAME", + Ttl: 300, + Rrdatas: []string{"foo.example.com."}, + }, + { + Name: fmt.Sprintf("a.%s.hashicorptest.com.", zoneSuffix), + Type: "A", + Ttl: 300, + Rrdatas: []string{"1.1.1.1"}, + }, + { + Name: fmt.Sprintf("nested.%s.hashicorptest.com.", zoneSuffix), + Type: "NS", + Ttl: 300, + Rrdatas: []string{"ns.hashicorp.services.", "ns2.hashicorp.services."}, + }, + }, + } + + chg, err := config.NewDnsClient(config.UserAgent).Changes.Create(project, zone, chg).Do() + if err != nil { + return fmt.Errorf("Error creating DNS RecordSet: %s", err) + } + + w := &tpgdns.DnsChangeWaiter{ + Service: config.NewDnsClient(config.UserAgent), + Change: chg, + Project: project, + ManagedZone: zone, + } + _, err = w.Conf().WaitForState() + if err != nil { + return fmt.Errorf("Error waiting for Google DNS change: %s", err) + } + + return nil + } +} + +func testAccDNSManagedZone_forceDestroy(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "%s.hashicorptest.com." + labels = { + foo = "bar" + } + force_destroy = true + visibility = "public" +} +`, suffix, suffix) +} + +func testAccDnsManagedZone_basic(suffix, description string, labels map[string]string) string { + labelsRep := "" + for k, v := range labels { + labelsRep += fmt.Sprintf("%s = %q, ", k, v) + } + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + description = "%s" + labels = {%s} + visibility = "public" +} +`, suffix, suffix, description, labelsRep) +} + +func testAccDnsManagedZone_dnssec_on(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." 
+ + dnssec_config { + state = "on" + default_key_specs { + algorithm = "rsasha256" + key_length = "2048" + key_type = "zoneSigning" + } + default_key_specs { + algorithm = "rsasha256" + key_length = "2048" + key_type = "keySigning" + } + + non_existence = "nsec" + } +} +`, suffix, suffix) +} + +func testAccDnsManagedZone_dnssec_off(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + + dnssec_config { + state = "off" + default_key_specs { + algorithm = "rsasha256" + key_length = "2048" + key_type = "zoneSigning" + } + default_key_specs { + algorithm = "rsasha256" + key_length = "2048" + key_type = "keySigning" + } + + non_existence = "nsec3" + } +} +`, suffix, suffix) +} + +func testAccDnsManagedZone_dnssec_empty(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + + dnssec_config { + state = "off" + } +} +`, suffix, suffix) +} + +func testAccDnsManagedZone_privateUpdate(suffix, first_network, second_network string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "private" { + name = "private-zone-%s" + dns_name = "private.example.com." 
+ description = "Example private DNS zone" + visibility = "private" + private_visibility_config { + networks { + network_url = google_compute_network.%s.self_link + } + networks { + network_url = google_compute_network.%s.self_link + } + gke_clusters { + gke_cluster_name = google_container_cluster.cluster-1.id + } + } +} + +resource "google_compute_network" "network-1" { + name = "tf-test-net-1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + name = "tf-test-net-2-%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-3" { + name = "tf-test-network-3-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnetwork-1" { + name = google_compute_network.network-1.name + network = google_compute_network.network-1.name + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "cluster-1" { + name = "tf-test-cluster-1-%s" + location = "us-central1-c" + initial_node_count = 1 + deletion_protection = false + + networking_mode = "VPC_NATIVE" + default_snat_status { + disabled = true + } + network = google_compute_network.network-1.name + subnetwork = google_compute_subnetwork.subnetwork-1.name + + private_cluster_config { + enable_private_endpoint = true + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + master_global_access_config { + enabled = true + } + } + master_authorized_networks_config { + } + ip_allocation_policy { + cluster_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[0].range_name + services_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[1].range_name + } +} +`, suffix, first_network, second_network, suffix, suffix, suffix, suffix) 
+} + +func testAccDnsManagedZone_privateForwardingUpdate(suffix, first_nameserver, second_nameserver, first_forwarding_path, second_forwarding_path string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "private" { + name = "private-zone-%s" + dns_name = "private.example.com." + description = "Example private DNS zone" + visibility = "private" + private_visibility_config { + networks { + network_url = google_compute_network.network-1.self_link + } + } + + forwarding_config { + target_name_servers { + ipv4_address = "%s" + forwarding_path = "%s" + } + target_name_servers { + ipv4_address = "%s" + forwarding_path = "%s" + } + } +} + +resource "google_compute_network" "network-1" { + name = "tf-test-net-1-%s" + auto_create_subnetworks = false +} +`, suffix, first_nameserver, first_forwarding_path, second_nameserver, second_forwarding_path, suffix) +} + +func testAccDnsManagedZone_cloudLoggingConfig_basic(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + description = "Example DNS zone" + labels = { + foo = "bar" + } +} +`, suffix, suffix) +} + +func testAccDnsManagedZone_cloudLoggingConfig_update(suffix string, enableCloudLogging bool) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." + description = "Example DNS zone" + labels = { + foo = "bar" + } + + cloud_logging_config { + enable_logging = %t + } +} +`, suffix, suffix, enableCloudLogging) +} + +{{ if or (ne $.TargetVersionName ``) (eq $.TargetVersionName `ga`) }} +func testAccDnsManagedZone_reverseLookup(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "reverse" { + name = "reverse-zone-%s" + dns_name = "1.0.168.192.in-addr.arpa." 
+ visibility = "private" + description = "Example private DNS zone" + + reverse_lookup = true +} + +resource "google_compute_network" "network-1" { + name = "tf-test-net-1-%s" + auto_create_subnetworks = false +} +`, suffix, suffix) +} +{{- end }} + +func TestDnsManagedZoneImport_parseImportId(t *testing.T) { + zoneRegexes := []string{ + "projects/(?P[^/]+)/managedZones/(?P[^/]+)", + "(?P[^/]+)/managedZones/(?P[^/]+)", + "(?P[^/]+)", + } + + cases := map[string]struct { + ImportId string + IdRegexes []string + Config *transport_tpg.Config + ExpectedSchemaValues map[string]interface{} + ExpectError bool + }{ + "full self_link": { + IdRegexes: zoneRegexes, + ImportId: "https://dns.googleapis.com/dns/v1/projects/my-project/managedZones/my-zone", + ExpectedSchemaValues: map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "relative self_link": { + IdRegexes: zoneRegexes, + ImportId: "projects/my-project/managedZones/my-zone", + ExpectedSchemaValues: map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "short id": { + IdRegexes: zoneRegexes, + ImportId: "my-project/managedZones/my-zone", + ExpectedSchemaValues: map[string]interface{}{ + "project": "my-project", + "name": "my-zone", + }, + }, + "short id with default project and region": { + IdRegexes: zoneRegexes, + ImportId: "my-zone", + Config: &transport_tpg.Config{ + Project: "default-project", + }, + ExpectedSchemaValues: map[string]interface{}{ + "project": "default-project", + "name": "my-zone", + }, + }, + } + + for tn, tc := range cases { + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: make(map[string]interface{}), + } + d.SetId(tc.ImportId) + config := tc.Config + if config == nil { + config = &transport_tpg.Config{} + } + // + if err := tpgresource.ParseImportId(tc.IdRegexes, d, config); err == nil { + for k, expectedValue := range tc.ExpectedSchemaValues { + if v, ok := d.GetOk(k); ok { + if v != expectedValue { + t.Errorf("%s failed; 
Expected value %q for field %q, got %q", tn, expectedValue, k, v) + } + } else { + t.Errorf("%s failed; Expected a value for field %q", tn, k) + } + } + } else if !tc.ExpectError { + t.Errorf("%s failed; unexpected error: %s", tn, err) + } + } +} + +func TestAccDNSManagedZone_importWithProject(t *testing.T) { + t.Parallel() + + zoneSuffix := acctest.RandString(t, 10) + project := envvar.GetTestProjectFromEnv() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSManagedZoneDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsManagedZone_basicWithProject(zoneSuffix, "description1", project), + }, + { + ResourceName: "google_dns_managed_zone.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDnsManagedZone_basicWithProject(suffix, description, project string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "foobar" { + name = "mzone-test-%s" + dns_name = "tf-acctest-%s.hashicorptest.com." 
+ description = "%s" + project = "%s" +} +`, suffix, suffix, description, project) +} diff --git a/mmv1/third_party/terraform/services/dns/go/resource_dns_policy_test.go b/mmv1/third_party/terraform/services/dns/go/resource_dns_policy_test.go new file mode 100644 index 000000000000..69bde4cb51d9 --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/resource_dns_policy_test.go @@ -0,0 +1,72 @@ +package dns_test + +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDNSPolicy_update(t *testing.T) { + t.Parallel() + + policySuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDNSPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsPolicy_privateUpdate(policySuffix, "true", "172.16.1.10", "172.16.1.30", "network-1"), + }, + { + ResourceName: "google_dns_policy.example-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsPolicy_privateUpdate(policySuffix, "false", "172.16.1.20", "172.16.1.40", "network-2"), + }, + { + ResourceName: "google_dns_policy.example-policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDnsPolicy_privateUpdate(suffix, forwarding, first_nameserver, second_nameserver, network string) string { + return fmt.Sprintf(` +resource "google_dns_policy" "example-policy" { + name = "example-policy-%s" + enable_inbound_forwarding = %s + + alternative_name_server_config { + target_name_servers { + ipv4_address = "%s" + } + target_name_servers { + ipv4_address = "%s" + forwarding_path = "private" + } + } + + networks { + network_url = google_compute_network.%s.self_link + } +} + +resource "google_compute_network" "network-1" { + name = "tf-test-network-1-%s" + 
auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + name = "tf-test-network-2-%s" + auto_create_subnetworks = false +} +`, suffix, forwarding, first_nameserver, second_nameserver, network, suffix, suffix) +} diff --git a/mmv1/third_party/terraform/services/dns/go/resource_dns_record_set_test.go.tmpl b/mmv1/third_party/terraform/services/dns/go/resource_dns_record_set_test.go.tmpl new file mode 100644 index 000000000000..d3621e9b8bb3 --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/resource_dns_record_set_test.go.tmpl @@ -0,0 +1,1186 @@ +package dns_test + +import ( + "fmt" + "net" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + tpgdns "github.com/hashicorp/terraform-provider-google/google/services/dns" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "google.golang.org/api/dns/v1" +) + +func TestIpv6AddressDiffSuppress(t *testing.T) { + cases := map[string]struct { + Old, New []string + ShouldSuppress bool + }{ + "compact form should suppress diff": { + Old: []string{"2a03:b0c0:1:e0::29b:8001"}, + New: []string{"2a03:b0c0:0001:00e0:0000:0000:029b:8001"}, + ShouldSuppress: true, + }, + "different address should not suppress diff": { + Old: []string{"2a03:b0c0:1:e00::29b:8001"}, + New: []string{"2a03:b0c0:0001:00e0:0000:0000:029b:8001"}, + ShouldSuppress: false, + }, + "increase address should not suppress diff": { + Old: []string{""}, + New: []string{"2a03:b0c0:0001:00e0:0000:0000:029b:8001"}, + ShouldSuppress: false, + }, + "decrease address should not suppress diff": { + Old: []string{"2a03:b0c0:1:e00::29b:8001"}, + New: []string{""}, + ShouldSuppress: false, + }, + "switch 
address positions should suppress diff": { + Old: []string{"2a03:b0c0:1:e00::28b:8001", "2a03:b0c0:1:e0::29b:8001"}, + New: []string{"2a03:b0c0:1:e0::29b:8001", "2a03:b0c0:1:e00::28b:8001"}, + ShouldSuppress: true, + }, + } + + parseFunc := func(x string) string { + return net.ParseIP(x).String() + } + + for tn, tc := range cases { + shouldSuppress := tpgdns.RrdatasListDiffSuppress(tc.Old, tc.New, parseFunc, nil) + if shouldSuppress != tc.ShouldSuppress { + t.Errorf("%s: expected %t", tn, tc.ShouldSuppress) + } + } +} + +func TestAccDNSRecordSet_basic(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/test-record.%s.hashicorptest.com./A", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + // Check both import formats + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_Update(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: 
fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.11", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.11", 600), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_changeType(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_bigChange(zoneName, 600), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./CNAME", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_nestedNS(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-ns-%s", acctest.RandString(t, 10)) + recordSetName := 
fmt.Sprintf("\"nested.%s.hashicorptest.com.\"", zoneName) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_NS(zoneName, recordSetName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/nested.%s.hashicorptest.com./NS", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_secondaryNS(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-ns-%s", acctest.RandString(t, 10)) + recordSetName := "google_dns_managed_zone.parent-zone.dns_name" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_NS(zoneName, recordSetName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("projects/%s/managedZones/%s/rrsets/%s.hashicorptest.com./NS", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +// tracks fix for https://github.com/hashicorp/terraform-provider-google/issues/12827 +func TestAccDNSRecordSet_deletionSOA(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-soa-%s", acctest.RandString(t, 10)) + recordSetName := "google_dns_managed_zone.parent-zone.dns_name" + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_SOA(zoneName, recordSetName, 300), + 
}, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("projects/%s/managedZones/%s/rrsets/%s.hashicorptest.com./SOA", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_quotedTXT(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-txt-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_quotedTXT(zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/test-record.%s.hashicorptest.com./TXT", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_uppercaseMX(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-txt-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_uppercaseMX(zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./MX", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_routingPolicy(t *testing.T) { + t.Parallel() + + networkName := fmt.Sprintf("tf-test-network-%s", acctest.RandString(t, 10)) + backendSubnetName := fmt.Sprintf("tf-test-backend-subnet-%s", acctest.RandString(t, 10)) + proxySubnetName := fmt.Sprintf("tf-test-proxy-subnet-%s", acctest.RandString(t, 10)) + httpHealthCheckName 
:= fmt.Sprintf("tf-test-http-health-check-%s", acctest.RandString(t, 10)) + backendName := fmt.Sprintf("tf-test-backend-%s", acctest.RandString(t, 10)) + urlMapName := fmt.Sprintf("tf-test-url-map-%s", acctest.RandString(t, 10)) + httpProxyName := fmt.Sprintf("tf-test-http-proxy-%s", acctest.RandString(t, 10)) + forwardingRuleName := fmt.Sprintf("tf-test-forwarding-rule-%s", acctest.RandString(t, 10)) + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_routingPolicyWRR(networkName, backendName, forwardingRuleName, zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_routingPolicyGEO(networkName, backendName, forwardingRuleName, zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_routingPolicyPrimaryBackup(networkName, backendName, forwardingRuleName, zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_routingPolicyRegionalL7PrimaryBackup(networkName, proxySubnetName, httpHealthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName, 300), + }, + { + 
ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_routingPolicyCrossRegionL7PrimaryBackup(networkName, backendSubnetName, proxySubnetName, httpHealthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_changeRouting(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_routingPolicy(zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, 
+ }, + }, + }) +} + +// Tracks fix for https://github.com/hashicorp/terraform-provider-google/issues/12043 +func TestAccDNSRecordSet_interpolated(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_interpolated(zoneName), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/test-record.%s.hashicorptest.com./TXT", zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_readOutOfBandRoutingPolicyChange(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + rrsetName := fmt.Sprintf("test-record.%s.hashicorptest.com.", zoneName) + ttl := 300 + rrdata := []string{"127.0.0.1", "127.0.0.10"} + routingPolicy := &dns.RRSetRoutingPolicy{ + Wrr: &dns.RRSetRoutingPolicyWrrPolicy{ + Items: []*dns.RRSetRoutingPolicyWrrPolicyWrrPolicyItem{ + { + Weight: 0, + Rrdatas: []string{"1.2.3.4", "4.3.2.1"}, + }, + { + Weight: 0, + Rrdatas: []string{"2.3.4.5", "5.4.3.2"}, + }, + }, + }, + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + PreConfig: testAccCheckDnsRecordSetSetRoutingPolicyOutOfBand(t, zoneName, 
rrsetName, ttl, rrdata, routingPolicy), + Config: testAccDnsRecordSet_basic(zoneName, "127.0.0.10", 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSRecordSet_readOutOfBandRrDataChange(t *testing.T) { + t.Parallel() + + zoneName := fmt.Sprintf("dnszone-test-%s", acctest.RandString(t, 10)) + rrsetName := fmt.Sprintf("test-record.%s.hashicorptest.com.", zoneName) + ttl := 300 + rrdata := []string{"127.0.0.1", "127.0.0.10"} + routingPolicy := &dns.RRSetRoutingPolicy{ + Wrr: &dns.RRSetRoutingPolicyWrrPolicy{ + Items: []*dns.RRSetRoutingPolicyWrrPolicyWrrPolicyItem{ + { + Weight: 0, + Rrdatas: []string{"1.2.3.4", "4.3.2.1"}, + }, + { + Weight: 0, + Rrdatas: []string{"2.3.4.5", "5.4.3.2"}, + }, + }, + }, + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckDnsRecordSetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsRecordSet_routingPolicy(zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + { + PreConfig: testAccCheckDnsRecordSetSetRrdataOutOfBand(t, zoneName, rrsetName, ttl, rrdata, routingPolicy), + Config: testAccDnsRecordSet_routingPolicy(zoneName, 300), + }, + { + ResourceName: "google_dns_record_set.foobar", + ImportStateId: fmt.Sprintf("%s/%s/test-record.%s.hashicorptest.com./A", envvar.GetTestProjectFromEnv(), zoneName, zoneName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckDnsRecordSetSetRoutingPolicyOutOfBand(t *testing.T, zoneName, rrsetName 
string, ttl int, rrdata []string, routingPolicy *dns.RRSetRoutingPolicy) func() { + return func() { + config := acctest.GoogleProviderConfig(t) + service := config.NewDnsClient(config.UserAgent).Changes + chg := &dns.Change{ + Deletions: []*dns.ResourceRecordSet{ + { + Name: rrsetName, + Type: "A", + Ttl: int64(ttl), + Rrdatas: rrdata, + }, + }, + Additions: []*dns.ResourceRecordSet{ + { + Name: rrsetName, + Type: "A", + Ttl: int64(ttl), + RoutingPolicy: routingPolicy, + }, + }, + } + chg, err := service.Create(config.Project, zoneName, chg).Do() + if err != nil { + t.Errorf("Error while changing rrset %s/%s/%s out of band: %s", config.Project, zoneName, rrsetName, err) + return + } + w := &tpgdns.DnsChangeWaiter{ + Service: config.NewDnsClient(config.UserAgent), + Change: chg, + Project: config.Project, + ManagedZone: zoneName, + } + if _, err = w.Conf().WaitForState(); err != nil { + t.Errorf("Error waiting for out of band Google DNS change: %s", err) + } + } +} + +func testAccCheckDnsRecordSetSetRrdataOutOfBand(t *testing.T, zoneName, rrsetName string, ttl int, rrdata []string, routingPolicy *dns.RRSetRoutingPolicy) func() { + return func() { + config := acctest.GoogleProviderConfig(t) + service := config.NewDnsClient(config.UserAgent).Changes + chg := &dns.Change{ + Deletions: []*dns.ResourceRecordSet{ + { + Name: rrsetName, + Type: "A", + Ttl: int64(ttl), + RoutingPolicy: routingPolicy, + }, + }, + Additions: []*dns.ResourceRecordSet{ + { + Name: rrsetName, + Type: "A", + Ttl: int64(ttl), + Rrdatas: rrdata, + }, + }, + } + chg, err := service.Create(config.Project, zoneName, chg).Do() + if err != nil { + t.Errorf("Error while changing rrset %s/%s/%s out of band: %s", config.Project, zoneName, rrsetName, err) + return + } + w := &tpgdns.DnsChangeWaiter{ + Service: config.NewDnsClient(config.UserAgent), + Change: chg, + Project: config.Project, + ManagedZone: zoneName, + } + if _, err = w.Conf().WaitForState(); err != nil { + t.Errorf("Error waiting for out of 
band Google DNS change: %s", err) + } + } +} + +func testAccCheckDnsRecordSetDestroyProducer(t *testing.T) func(s *terraform.State) error { + + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_dns_record_set" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}DNSBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/managedZones/{{"{{"}}managed_zone{{"}}"}}/rrsets/{{"{{"}}name{{"}}"}}/{{"{{"}}type{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("DNSResourceDnsRecordSet still exists at %s", url) + } + } + + return nil + } +} + +func testAccDnsRecordSet_basic(zoneName string, addr2 string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." + type = "A" + rrdatas = ["127.0.0.1", "%s"] + ttl = %d +} +`, zoneName, zoneName, zoneName, addr2, ttl) +} + +func testAccDnsRecordSet_routingPolicy(zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + ttl = %d + routing_policy { + wrr { + weight = 0 + rrdatas = ["1.2.3.4", "4.3.2.1"] + } + + wrr { + weight = 0 + rrdatas = ["2.3.4.5", "5.4.3.2"] + } + } +} +`, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_NS(name string, recordSetName string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = %s + type = "NS" + rrdatas = ["ns.hashicorp.services.", "ns2.hashicorp.services."] + ttl = %d +} +`, name, name, recordSetName, ttl) +} + +func testAccDnsRecordSet_bigChange(zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." + type = "CNAME" + rrdatas = ["www.terraform.io."] + ttl = %d +} +`, zoneName, zoneName, zoneName, ttl) +} + + +func testAccDnsRecordSet_SOA(name string, recordSetName string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = %s + type = "SOA" + rrdatas = ["ns-cloud-a1.googledomains.com. cloud-dns-hostmaster.google.com. 629010464 900 900 1800 60"] + ttl = %d +} +`, name, name, recordSetName, ttl) +} + +func testAccDnsRecordSet_quotedTXT(name string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." 
+ description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." + type = "TXT" + rrdatas = ["test", "\"quoted test\""] + ttl = %d +} +`, name, name, name, ttl) +} + +func testAccDnsRecordSet_uppercaseMX(name string, ttl int) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." + type = "MX" + rrdatas = [ + "1 ASPMX.L.GOOGLE.COM.", + "5 ALT1.ASPMX.L.GOOGLE.COM.", + "5 ALT2.ASPMX.L.GOOGLE.COM.", + "10 ASPMX2.GOOGLEMAIL.COM.", + "10 ASPMX3.GOOGLEMAIL.COM.", + ] + ttl = %d +} +`, name, name, name, ttl) +} + +func testAccDnsRecordSet_routingPolicyWRR(networkName, backendName, forwardingRuleName, zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" +} + +resource "google_compute_region_backend_service" "backend" { + name = "%s" + region = "us-central1" +} + +resource "google_compute_forwarding_rule" "default" { + name = "%s" + region = "us-central1" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.backend.id + all_ports = true + allow_global_access = true + network = google_compute_network.default.name +} + +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + visibility = "private" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + ttl = %d + + routing_policy { + wrr { + weight = 0 + rrdatas = ["1.2.3.4", "4.3.2.1"] + } + + wrr { + weight = 0 + rrdatas = ["2.3.4.5", "5.4.3.2"] + } + + wrr { + weight = 1.0 + + health_checked_targets { + internal_load_balancers { + load_balancer_type = "regionalL4ilb" + ip_address = google_compute_forwarding_rule.default.ip_address + port = "80" + ip_protocol = "tcp" + network_url = google_compute_network.default.id + project = google_compute_forwarding_rule.default.project + region = google_compute_forwarding_rule.default.region + } + } + } + } +} +`, networkName, backendName, forwardingRuleName, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_routingPolicyGEO(networkName, backendName, forwardingRuleName, zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" +} + +resource "google_compute_region_backend_service" "backend" { + name = "%s" + region = "us-central1" +} + +resource "google_compute_forwarding_rule" "default" { + name = "%s" + region = "us-central1" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.backend.id + all_ports = true + allow_global_access = true + network = google_compute_network.default.name +} + +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + visibility = "private" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + ttl = %d + + routing_policy { + enable_geo_fencing = true + + geo { + location = "us-east4" + rrdatas = ["1.2.3.4", "4.3.2.1"] + } + + geo { + location = "asia-east1" + rrdatas = ["2.3.4.5", "5.4.3.2"] + } + + geo { + location = "us-central1" + + health_checked_targets { + internal_load_balancers { + load_balancer_type = "regionalL4ilb" + ip_address = google_compute_forwarding_rule.default.ip_address + port = "80" + ip_protocol = "tcp" + network_url = google_compute_network.default.id + project = google_compute_forwarding_rule.default.project + region = google_compute_forwarding_rule.default.region + } + } + } + } +} +`, networkName, backendName, forwardingRuleName, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_routingPolicyPrimaryBackup(networkName, backendName, forwardingRuleName, zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" +} + +resource "google_compute_region_backend_service" "backend" { + name = "%s" + region = "us-central1" +} + +resource "google_compute_forwarding_rule" "default" { + name = "%s" + region = "us-central1" + + load_balancing_scheme = "INTERNAL" + backend_service = google_compute_region_backend_service.backend.id + all_ports = true + allow_global_access = true + network = google_compute_network.default.name +} + +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + visibility = "private" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + ttl = %d + + routing_policy { + primary_backup { + trickle_ratio = 0.1 + enable_geo_fencing_for_backups = true + + primary { + internal_load_balancers { + load_balancer_type = "regionalL4ilb" + ip_address = google_compute_forwarding_rule.default.ip_address + port = "80" + ip_protocol = "tcp" + network_url = google_compute_network.default.id + project = google_compute_forwarding_rule.default.project + region = google_compute_forwarding_rule.default.region + } + } + + backup_geo { + location = "us-west1" + rrdatas = ["1.2.3.4"] + } + + backup_geo { + location = "asia-east1" + rrdatas = ["5.6.7.8"] + } + } + } +} +`, networkName, backendName, forwardingRuleName, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_routingPolicyRegionalL7PrimaryBackup(networkName, proxySubnetName, healthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" +} + +resource "google_compute_subnetwork" "proxy_subnet" { + name = "%s" + ip_cidr_range = "10.100.0.0/24" + region = "us-central1" + purpose = "INTERNAL_HTTPS_LOAD_BALANCER" + role = "ACTIVE" + network = google_compute_network.default.id +} + +resource "google_compute_region_health_check" "health_check" { + name = "%s" + region = "us-central1" + + http_health_check { + port = 80 + } +} + +resource "google_compute_region_backend_service" "backend" { + name = "%s" + region = "us-central1" + load_balancing_scheme = "INTERNAL_MANAGED" + protocol = "HTTP" + health_checks = [google_compute_region_health_check.health_check.id] +} + +resource "google_compute_region_url_map" "url_map" { + name = "%s" + region = "us-central1" + default_service = google_compute_region_backend_service.backend.id +} + +resource "google_compute_region_target_http_proxy" "http_proxy" { + name = "%s" + region = "us-central1" + url_map = google_compute_region_url_map.url_map.id +} + +resource 
"google_compute_forwarding_rule" "default" { + name = "%s" + region = "us-central1" + depends_on = [google_compute_subnetwork.proxy_subnet] + load_balancing_scheme = "INTERNAL_MANAGED" + target = google_compute_region_target_http_proxy.http_proxy.id + port_range = "80" + allow_global_access = true + network = google_compute_network.default.name + ip_protocol = "TCP" +} + +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + visibility = "private" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." + type = "A" + ttl = %d + + routing_policy { + primary_backup { + trickle_ratio = 0.1 + enable_geo_fencing_for_backups = true + + primary { + internal_load_balancers { + load_balancer_type = "regionalL7ilb" + ip_address = google_compute_forwarding_rule.default.ip_address + port = "80" + ip_protocol = "tcp" + network_url = google_compute_network.default.id + project = google_compute_forwarding_rule.default.project + region = google_compute_forwarding_rule.default.region + } + } + + backup_geo { + location = "us-west1" + rrdatas = ["1.2.3.4"] + } + + backup_geo { + location = "asia-east1" + rrdatas = ["5.6.7.8"] + } + } + } +} +`, networkName, proxySubnetName, healthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_routingPolicyCrossRegionL7PrimaryBackup(networkName, backendSubnetName, proxySubnetName, healthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName string, ttl int) string { + return fmt.Sprintf(` +resource "google_compute_network" "default" { + name = "%s" +} + +resource "google_compute_subnetwork" "backend_subnet" { + name = "%s" + ip_cidr_range = "10.0.1.0/24" + region = "us-central1" + network = google_compute_network.default.id +} + +resource 
"google_compute_subnetwork" "proxy_subnet" { + name = "%s" + ip_cidr_range = "10.100.0.0/24" + region = "us-central1" + purpose = "GLOBAL_MANAGED_PROXY" + role = "ACTIVE" + network = google_compute_network.default.id +} + +resource "google_compute_health_check" "health_check" { + name = "%s" + + http_health_check { + port = 80 + } +} + +resource "google_compute_backend_service" "backend" { + name = "%s" + load_balancing_scheme = "INTERNAL_MANAGED" + protocol = "HTTP" + health_checks = [google_compute_health_check.health_check.id] +} + +resource "google_compute_url_map" "url_map" { + name = "%s" + default_service = google_compute_backend_service.backend.id +} + +resource "google_compute_target_http_proxy" "http_proxy" { + name = "%s" + url_map = google_compute_url_map.url_map.id +} + +resource "google_compute_global_forwarding_rule" "default" { + name = "%s" + depends_on = [google_compute_subnetwork.proxy_subnet] + load_balancing_scheme = "INTERNAL_MANAGED" + target = google_compute_target_http_proxy.http_proxy.id + port_range = "80" + network = google_compute_network.default.name + subnetwork = google_compute_subnetwork.backend_subnet.name + ip_protocol = "TCP" +} + +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" + visibility = "private" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "A" + ttl = %d + + routing_policy { + primary_backup { + trickle_ratio = 0.1 + enable_geo_fencing_for_backups = true + + primary { + internal_load_balancers { + load_balancer_type = "globalL7ilb" + ip_address = google_compute_global_forwarding_rule.default.ip_address + port = "80" + ip_protocol = "tcp" + network_url = google_compute_network.default.id + project = google_compute_global_forwarding_rule.default.project + } + } + + backup_geo { + location = "us-west1" + rrdatas = ["1.2.3.4"] + } + + backup_geo { + location = "asia-east1" + rrdatas = ["5.6.7.8"] + } + } + } +} +`, networkName, backendSubnetName, proxySubnetName, healthCheckName, backendName, urlMapName, httpProxyName, forwardingRuleName, zoneName, zoneName, zoneName, ttl) +} + +func testAccDnsRecordSet_interpolated(zoneName string) string { + return fmt.Sprintf(` +resource "google_dns_managed_zone" "parent-zone" { + name = "%s" + dns_name = "%s.hashicorptest.com." + description = "Test Description" +} + +resource "google_dns_record_set" "foobar" { + managed_zone = google_dns_managed_zone.parent-zone.name + name = "test-record.%s.hashicorptest.com." 
+ type = "TXT" + rrdatas = ["127.0.0.1", "firebase=${google_dns_managed_zone.parent-zone.id}"] + ttl = 10 +} +`, zoneName, zoneName, zoneName) +} diff --git a/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_rule_test.go.tmpl b/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_rule_test.go.tmpl new file mode 100644 index 000000000000..b7d29eaebc6c --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_rule_test.go.tmpl @@ -0,0 +1,197 @@ +package dns_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDNSResponsePolicyRule_update(t *testing.T) { + t.Parallel() + + responsePolicyRuleSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckDNSResponsePolicyRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsResponsePolicyRule_privateUpdate(responsePolicyRuleSuffix, "network-1"), + }, + { + ResourceName: "google_dns_response_policy_rule.example-response-policy-rule", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsResponsePolicyRule_privateUpdate(responsePolicyRuleSuffix, "network-2"), + }, + { + ResourceName: "google_dns_response_policy_rule.example-response-policy-rule", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccDNSResponsePolicyRuleBehavior_update(t *testing.T) { + t.Parallel() + + responsePolicyRuleSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: 
testAccCheckDNSResponsePolicyRuleDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsResponsePolicyRuleBehavior_unspecified(responsePolicyRuleSuffix, "network-1"), + }, + { + ResourceName: "google_dns_response_policy_rule.example-response-policy-rule-behavior", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsResponsePolicyRuleBehavior_byPass(responsePolicyRuleSuffix, "network-1"), + }, + { + ResourceName: "google_dns_response_policy_rule.example-response-policy-rule-behavior", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsResponsePolicyRuleBehavior_unspecified(responsePolicyRuleSuffix, "network-1"), + }, + { + ResourceName: "google_dns_response_policy_rule.example-response-policy-rule-behavior", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDnsResponsePolicyRuleBehavior_unspecified(suffix, network string) string { + return fmt.Sprintf(` + +resource "google_compute_network" "network-1" { + provider = google-beta + + name = "tf-test-network-1-%s" + auto_create_subnetworks = false +} + +resource "google_dns_response_policy" "response-policy" { + provider = google-beta + + response_policy_name = "tf-test-response-policy-%s" + + networks { + network_url = google_compute_network.%s.self_link + } +} + +resource "google_dns_response_policy_rule" "example-response-policy-rule-behavior" { + provider = google-beta + + response_policy = google_dns_response_policy.response-policy.response_policy_name + rule_name = "tf-test-response-policy-rule-%s" + dns_name = "dns.example.com." + + local_data { + local_datas { + name = "dns.example.com." 
+ type = "A" + ttl = 300 + rrdatas = ["192.0.2.91"] + } + } +} + +`, suffix, suffix, network, suffix) +} + +func testAccDnsResponsePolicyRuleBehavior_byPass(suffix, network string) string { + return fmt.Sprintf(` + +resource "google_compute_network" "network-1" { + provider = google-beta + + name = "tf-test-network-1-%s" + auto_create_subnetworks = false +} + +resource "google_dns_response_policy" "response-policy" { + provider = google-beta + + response_policy_name = "tf-test-response-policy-%s" + + networks { + network_url = google_compute_network.%s.self_link + } +} + +resource "google_dns_response_policy_rule" "example-response-policy-rule-behavior" { + provider = google-beta + + behavior = "bypassResponsePolicy" + dns_name = "dns.example.com." + rule_name = "tf-test-response-policy-rule-%s" + response_policy = google_dns_response_policy.response-policy.response_policy_name + +} +`, suffix, suffix, network, suffix) +} + + +func testAccDnsResponsePolicyRule_privateUpdate(suffix, network string) string { + return fmt.Sprintf(` +resource "google_compute_network" "network-1" { + provider = google-beta + + name = "tf-test-network-1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + provider = google-beta + + name = "tf-test-network-2-%s" + auto_create_subnetworks = false +} + +resource "google_dns_response_policy" "response-policy" { + provider = google-beta + + response_policy_name = "tf-test-response-policy-%s" + + networks { + network_url = google_compute_network.%s.self_link + } +} + +resource "google_dns_response_policy_rule" "example-response-policy-rule" { + provider = google-beta + + response_policy = google_dns_response_policy.response-policy.response_policy_name + rule_name = "tf-test-response-policy-rule-%s" + dns_name = "dns.example.com." + + local_data { + local_datas { + name = "dns.example.com." 
+ type = "A" + ttl = 300 + rrdatas = ["192.0.2.91"] + } + } +} +`, suffix, suffix, suffix, network, suffix) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_test.go.tmpl b/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_test.go.tmpl new file mode 100644 index 000000000000..26959c441a7f --- /dev/null +++ b/mmv1/third_party/terraform/services/dns/go/resource_dns_response_policy_test.go.tmpl @@ -0,0 +1,210 @@ +package dns_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccDNSResponsePolicy_update(t *testing.T) { + t.Parallel() + + responsePolicySuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckDNSResponsePolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDnsResponsePolicy_privateUpdate(responsePolicySuffix, "network-1"), + }, + { + ResourceName: "google_dns_response_policy.example-response-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsResponsePolicy_privateUpdate(responsePolicySuffix, "network-2"), + }, + { + ResourceName: "google_dns_response_policy.example-response-policy", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccDnsResponsePolicy_removeNetworks(responsePolicySuffix), + }, + { + ResourceName: "google_dns_response_policy.example-response-policy", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccDnsResponsePolicy_privateUpdate(suffix, network string) string { + return fmt.Sprintf(` +resource "google_dns_response_policy" "example-response-policy" { + provider = google-beta + + response_policy_name = 
"tf-test-response-policy-%s" + + networks { + network_url = google_compute_network.%s.self_link + } + gke_clusters { + gke_cluster_name = google_container_cluster.cluster-1.id + } +} + +resource "google_compute_network" "network-1" { + provider = google-beta + + name = "tf-test-network-1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + provider = google-beta + + name = "tf-test-network-2-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnetwork-1" { + provider = google-beta + + name = google_compute_network.network-1.name + network = google_compute_network.network-1.name + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "cluster-1" { + provider = google-beta + + name = "tf-test-cluster-1-%s" + location = "us-central1-c" + initial_node_count = 1 + + networking_mode = "VPC_NATIVE" + default_snat_status { + disabled = true + } + network = google_compute_network.network-1.name + subnetwork = google_compute_subnetwork.subnetwork-1.name + + private_cluster_config { + enable_private_endpoint = true + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + master_global_access_config { + enabled = true + } + } + master_authorized_networks_config { + } + ip_allocation_policy { + cluster_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[0].range_name + services_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[1].range_name + } + deletion_protection = false +} +`, suffix, network, suffix, suffix, suffix) +} + +func testAccDnsResponsePolicy_removeNetworks(suffix string) string { + return fmt.Sprintf(` +resource "google_dns_response_policy" "example-response-policy" { + 
provider = google-beta + + response_policy_name = "tf-test-response-policy-%s" + + gke_clusters { + gke_cluster_name = google_container_cluster.cluster-1.id + } +} + +resource "google_compute_network" "network-1" { + provider = google-beta + + name = "tf-test-network-1-%s" + auto_create_subnetworks = false +} + +resource "google_compute_network" "network-2" { + provider = google-beta + + name = "tf-test-network-2-%s" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "subnetwork-1" { + provider = google-beta + + name = google_compute_network.network-1.name + network = google_compute_network.network-1.name + ip_cidr_range = "10.0.36.0/24" + region = "us-central1" + private_ip_google_access = true + + secondary_ip_range { + range_name = "pod" + ip_cidr_range = "10.0.0.0/19" + } + + secondary_ip_range { + range_name = "svc" + ip_cidr_range = "10.0.32.0/22" + } +} + +resource "google_container_cluster" "cluster-1" { + provider = google-beta + + name = "tf-test-cluster-1-%s" + location = "us-central1-c" + initial_node_count = 1 + + networking_mode = "VPC_NATIVE" + default_snat_status { + disabled = true + } + network = google_compute_network.network-1.name + subnetwork = google_compute_subnetwork.subnetwork-1.name + + private_cluster_config { + enable_private_endpoint = true + enable_private_nodes = true + master_ipv4_cidr_block = "10.42.0.0/28" + master_global_access_config { + enabled = true + } + } + master_authorized_networks_config { + } + ip_allocation_policy { + cluster_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[0].range_name + services_secondary_range_name = google_compute_subnetwork.subnetwork-1.secondary_ip_range[1].range_name + } + deletion_protection = false +} +`, suffix, suffix, suffix, suffix) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_channel_test.go.tmpl 
b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_channel_test.go.tmpl new file mode 100644 index 000000000000..4c518290aebc --- /dev/null +++ b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_channel_test.go.tmpl @@ -0,0 +1,207 @@ +package eventarc_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + eventarc "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/eventarc{{ $.DCLVersion }}" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccEventarcChannel_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "region": envvar.GetTestRegionFromEnv(), + "project_name": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckEventarcChannelDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccEventarcChannel_basic(context), + }, + { + ResourceName: "google_eventarc_channel.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccEventarcChannel_cryptoKeyUpdate(t *testing.T) { + t.Parallel() + + region := envvar.GetTestRegionFromEnv() + key1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, "tf-bootstrap-eventarc-channel-key1") + key2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, 
"tf-bootstrap-eventarc-channel-key2") + + context := map[string]interface{}{ + "region": region, + "project_name": envvar.GetTestProjectFromEnv(), + "key_ring": tpgresource.GetResourceNameFromSelfLink(key1.KeyRing.Name), + "key1": tpgresource.GetResourceNameFromSelfLink(key1.CryptoKey.Name), + "key2": tpgresource.GetResourceNameFromSelfLink(key2.CryptoKey.Name), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckEventarcChannelDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccEventarcChannel_setCryptoKey(context), + }, + { + ResourceName: "google_eventarc_channel.primary", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccEventarcChannel_cryptoKeyUpdate(context), + }, + { + ResourceName: "google_eventarc_channel.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccEventarcChannel_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +resource "google_eventarc_channel" "primary" { + location = "%{region}" + name = "tf-test-name%{random_suffix}" + third_party_provider = "projects/${data.google_project.test_project.project_id}/locations/%{region}/providers/datadog" +} +`, context) +} + +func testAccEventarcChannel_setCryptoKey(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +data "google_kms_key_ring" "test_key_ring" { + name = "%{key_ring}" + location = "us-central1" +} + +data "google_kms_crypto_key" "key1" { + name = "%{key1}" + key_ring = data.google_kms_key_ring.test_key_ring.id +} + + +resource "google_kms_crypto_key_iam_member" "key1_member" { + crypto_key_id = data.google_kms_crypto_key.key1.id + role = 
"roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.test_project.number}@gcp-sa-eventarc.iam.gserviceaccount.com" +} + +resource "google_eventarc_channel" "primary" { + location = "%{region}" + name = "tf-test-name%{random_suffix}" + crypto_key_name = data.google_kms_crypto_key.key1.id + third_party_provider = "projects/${data.google_project.test_project.project_id}/locations/%{region}/providers/datadog" + depends_on = [google_kms_crypto_key_iam_member.key1_member] +} +`, context) +} + +func testAccEventarcChannel_cryptoKeyUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +data "google_kms_key_ring" "test_key_ring" { + name = "%{key_ring}" + location = "us-central1" +} + +data "google_kms_crypto_key" "key2" { + name = "%{key2}" + key_ring = data.google_kms_key_ring.test_key_ring.id +} + +resource "google_kms_crypto_key_iam_member" "key2_member" { + crypto_key_id = data.google_kms_crypto_key.key2.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.test_project.number}@gcp-sa-eventarc.iam.gserviceaccount.com" +} + +resource "google_eventarc_channel" "primary" { + location = "%{region}" + name = "tf-test-name%{random_suffix}" + crypto_key_name= data.google_kms_crypto_key.key2.id + third_party_provider = "projects/${data.google_project.test_project.project_id}/locations/%{region}/providers/datadog" + depends_on = [google_kms_crypto_key_iam_member.key2_member] +} +`, context) +} + +func testAccCheckEventarcChannelDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "rs.google_eventarc_channel" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + billingProject := "" + if 
config.BillingProject != "" { + billingProject = config.BillingProject + } + + obj := &eventarc.Channel{ + Location: dcl.String(rs.Primary.Attributes["location"]), + Name: dcl.String(rs.Primary.Attributes["name"]), + CryptoKeyName: dcl.String(rs.Primary.Attributes["crypto_key_name"]), + Project: dcl.StringOrNil(rs.Primary.Attributes["project"]), + ThirdPartyProvider: dcl.String(rs.Primary.Attributes["third_party_provider"]), + ActivationToken: dcl.StringOrNil(rs.Primary.Attributes["activation_token"]), + CreateTime: dcl.StringOrNil(rs.Primary.Attributes["create_time"]), + PubsubTopic: dcl.StringOrNil(rs.Primary.Attributes["pubsub_topic"]), + State: eventarc.ChannelStateEnumRef(rs.Primary.Attributes["state"]), + Uid: dcl.StringOrNil(rs.Primary.Attributes["uid"]), + UpdateTime: dcl.StringOrNil(rs.Primary.Attributes["update_time"]), + } + + client := transport_tpg.NewDCLEventarcClient(config, config.UserAgent, billingProject, 0) + _, err := client.GetChannel(context.Background(), obj) + if err == nil { + return fmt.Errorf("google_eventarc_channel still exists %v", obj) + } + } + return nil + } +} diff --git a/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_google_channel_config_test.go.tmpl b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_google_channel_config_test.go.tmpl new file mode 100644 index 000000000000..dbe556619332 --- /dev/null +++ b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_google_channel_config_test.go.tmpl @@ -0,0 +1,206 @@ +package eventarc_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + eventarc "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/eventarc{{ 
$.DCLVersion }}" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccEventarcGoogleChannelConfig_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_name": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckEventarcGoogleChannelConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccEventarcGoogleChannelConfig_basic(context), + }, + { + ResourceName: "google_eventarc_google_channel_config.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccEventarcGoogleChannelConfig_cryptoKeyUpdate(t *testing.T) { + t.Parallel() + + region := envvar.GetTestRegionFromEnv() + key1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, "tf-bootstrap-eventarc-google-channel-config-key1") + key2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, "tf-bootstrap-eventarc-google-channel-config-key2") + + context := map[string]interface{}{ + "project_name": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "key_ring": tpgresource.GetResourceNameFromSelfLink(key1.KeyRing.Name), + "key1": tpgresource.GetResourceNameFromSelfLink(key1.CryptoKey.Name), + "key2": tpgresource.GetResourceNameFromSelfLink(key2.CryptoKey.Name), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckEventarcGoogleChannelConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccEventarcGoogleChannelConfig_setCryptoKey(context), + }, + { + ResourceName: "google_eventarc_google_channel_config.primary", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccEventarcGoogleChannelConfig_cryptoKeyUpdate(context), + }, + { + ResourceName: "google_eventarc_google_channel_config.primary", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccEventarcGoogleChannelConfig_deleteCryptoKey(context), + }, + }, + }) +} + +func testAccEventarcGoogleChannelConfig_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_eventarc_google_channel_config" "primary" { + location = "%{region}" + name = "projects/%{project_name}/locations/%{region}/googleChannelConfig" +} + `, context) +} + +func testAccEventarcGoogleChannelConfig_setCryptoKey(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +data "google_kms_key_ring" "test_key_ring" { + name = "%{key_ring}" + location = "us-central1" +} + +data "google_kms_crypto_key" "key1" { + name = "%{key1}" + key_ring = data.google_kms_key_ring.test_key_ring.id +} + +resource "google_kms_crypto_key_iam_member" "key1_member" { + crypto_key_id = data.google_kms_crypto_key.key1.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.test_project.number}@gcp-sa-eventarc.iam.gserviceaccount.com" +} + +resource "google_eventarc_google_channel_config" "primary" { + location = "%{region}" + name = "projects/%{project_name}/locations/%{region}/googleChannelConfig" + crypto_key_name = data.google_kms_crypto_key.key1.id + depends_on =[google_kms_crypto_key_iam_member.key1_member] +} + `, context) +} + +func testAccEventarcGoogleChannelConfig_cryptoKeyUpdate(context map[string]interface{}) string { + 
return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +data "google_kms_key_ring" "test_key_ring" { + name = "%{key_ring}" + location = "us-central1" +} + +data "google_kms_crypto_key" "key2" { + name = "%{key2}" + key_ring = data.google_kms_key_ring.test_key_ring.id +} + +resource "google_kms_crypto_key_iam_member" "key2_member" { + crypto_key_id = data.google_kms_crypto_key.key2.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.test_project.number}@gcp-sa-eventarc.iam.gserviceaccount.com" +} + +resource "google_eventarc_google_channel_config" "primary" { + location = "%{region}" + name = "projects/%{project_name}/locations/%{region}/googleChannelConfig" + crypto_key_name = data.google_kms_crypto_key.key2.id + depends_on =[google_kms_crypto_key_iam_member.key2_member] +} + `, context) +} + +func testAccEventarcGoogleChannelConfig_deleteCryptoKey(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_eventarc_google_channel_config" "primary" { + location = "%{region}" + name = "projects/%{project_name}/locations/%{region}/googleChannelConfig" + crypto_key_name = "" +} + `, context) +} + +func testAccCheckEventarcGoogleChannelConfigDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_eventarc_google_channel_config" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + billingProject := "" + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + obj := &eventarc.GoogleChannelConfig{ + Location: dcl.String(rs.Primary.Attributes["location"]), + Name: dcl.String(rs.Primary.Attributes["name"]), + CryptoKeyName: dcl.String(rs.Primary.Attributes["crypto_key_name"]), + Project:
dcl.StringOrNil(rs.Primary.Attributes["project"]), + UpdateTime: dcl.StringOrNil(rs.Primary.Attributes["update_time"]), + } + + client := transport_tpg.NewDCLEventarcClient(config, config.UserAgent, billingProject, 0) + _, err := client.GetGoogleChannelConfig(context.Background(), obj) + if err == nil { + return fmt.Errorf("google_eventarc_google_channel_config still exists %v", obj) + } + } + return nil + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_trigger_test.go.tmpl b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_trigger_test.go.tmpl new file mode 100644 index 000000000000..7d358bd5739f --- /dev/null +++ b/mmv1/third_party/terraform/services/eventarc/go/resource_eventarc_trigger_test.go.tmpl @@ -0,0 +1,239 @@ +package eventarc_test + +import ( + "context" + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + eventarc "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/eventarc{{ $.DCLVersion }}" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + + +func TestAccEventarcTrigger_channel(t *testing.T) { + t.Parallel() + + region := envvar.GetTestRegionFromEnv() + key1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, "tf-bootstrap-eventarc-trigger-key1") + key2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", region, "tf-bootstrap-eventarc-trigger-key2") + + context := map[string]interface{}{ + "region": region, + "project_name": envvar.GetTestProjectFromEnv(), + 
"service_account": envvar.GetTestServiceAccountFromEnv(t), + "key_ring": tpgresource.GetResourceNameFromSelfLink(key1.KeyRing.Name), + "key1": tpgresource.GetResourceNameFromSelfLink(key1.CryptoKey.Name), + "key2": tpgresource.GetResourceNameFromSelfLink(key2.CryptoKey.Name), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckEventarcChannelTriggerDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccEventarcTrigger_createTriggerWithChannelName(context), + }, + { + ResourceName: "google_eventarc_trigger.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccEventarcTrigger_HttpDest(t *testing.T) { + t.Parallel() + + region := envvar.GetTestRegionFromEnv() + + testNetworkName := acctest.BootstrapSharedTestNetwork(t, "attachment-network") + subnetName := acctest.BootstrapSubnet(t, "tf-test-subnet", testNetworkName) + networkAttachmentName := acctest.BootstrapNetworkAttachment(t, "tf-test-attachment", subnetName) + + // Need to have the full network attachment name in the format project/{project_id}/regions/{region_id}/networkAttachments/{networkAttachmentName} + fullFormNetworkAttachmentName := fmt.Sprintf("projects/%s/regions/%s/networkAttachments/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), networkAttachmentName) + + context := map[string]interface{}{ + "region": region, + "project_name": envvar.GetTestProjectFromEnv(), + "service_account": envvar.GetTestServiceAccountFromEnv(t), + "network_attachment": fullFormNetworkAttachmentName, + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckEventarcChannelTriggerDestroyProducer(t), + 
Steps: []resource.TestStep{ + { + Config: testAccEventarcTrigger_createTriggerWithHttpDest(context), + }, + { + ResourceName: "google_eventarc_trigger.primary", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccEventarcTrigger_createTriggerWithChannelName(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +data "google_kms_key_ring" "test_key_ring" { + name = "%{key_ring}" + location = "us-central1" +} + +data "google_kms_crypto_key" "key1" { + name = "%{key1}" + key_ring = data.google_kms_key_ring.test_key_ring.id +} + + +resource "google_kms_crypto_key_iam_member" "key1_member" { + crypto_key_id = data.google_kms_crypto_key.key1.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:service-${data.google_project.test_project.number}@gcp-sa-eventarc.iam.gserviceaccount.com" +} + +resource "google_eventarc_channel" "test_channel" { + location = "%{region}" + name = "tf-test-channel%{random_suffix}" + crypto_key_name = data.google_kms_crypto_key.key1.id + third_party_provider = "projects/${data.google_project.test_project.project_id}/locations/%{region}/providers/datadog" + depends_on = [google_kms_crypto_key_iam_member.key1_member] +} + +resource "google_cloud_run_service" "default" { + name = "tf-test-eventarc-service%{random_suffix}" + location = "%{region}" + + metadata { + namespace = "%{project_name}" + } + + template { + spec { + containers { + image = "gcr.io/cloudrun/hello" + ports { + container_port = 8080 + } + } + container_concurrency = 50 + timeout_seconds = 100 + } + } + + traffic { + percent = 100 + latest_revision = true + } +} + +resource "google_eventarc_trigger" "primary" { + name = "tf-test-trigger%{random_suffix}" + location = "%{region}" + matching_criteria { + attribute = "type" + value = "datadog.v1.alert" + } + destination { + cloud_run_service { + service = 
google_cloud_run_service.default.name + region = "%{region}" + } + } + service_account = "%{service_account}" + + channel = "projects/${data.google_project.test_project.project_id}/locations/%{region}/channels/${google_eventarc_channel.test_channel.name}" + + depends_on = [google_cloud_run_service.default,google_eventarc_channel.test_channel] +} +`, context) +} + +func testAccEventarcTrigger_createTriggerWithHttpDest(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "test_project" { + project_id = "%{project_name}" +} + +resource "google_eventarc_trigger" "primary" { + name = "tf-test-trigger%{random_suffix}" + location = "%{region}" + matching_criteria { + attribute = "type" + value = "google.cloud.pubsub.topic.v1.messagePublished" + } + destination { + http_endpoint { + uri = "http://10.10.10.8:80/route" + } + network_config { + network_attachment = "%{network_attachment}" + } + + } + service_account = "%{service_account}" + +} +`, context) +} + +func testAccCheckEventarcChannelTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "rs.google_eventarc_trigger" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + billingProject := "" + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + obj := &eventarc.Trigger{ + Location: dcl.String(rs.Primary.Attributes["location"]), + Name: dcl.String(rs.Primary.Attributes["name"]), + Project: dcl.StringOrNil(rs.Primary.Attributes["project"]), + ServiceAccount: dcl.String(rs.Primary.Attributes["service_account"]), + CreateTime: dcl.StringOrNil(rs.Primary.Attributes["create_time"]), + Etag: dcl.StringOrNil(rs.Primary.Attributes["etag"]), + Uid: dcl.StringOrNil(rs.Primary.Attributes["uid"]), + UpdateTime: dcl.StringOrNil(rs.Primary.Attributes["update_time"]), + Channel: 
dcl.StringOrNil(rs.Primary.Attributes["channel"]), + EventDataContentType: dcl.StringOrNil(rs.Primary.Attributes["event_data_content_type"]), + } + + client := transport_tpg.NewDCLEventarcClient(config, config.UserAgent, billingProject, 0) + _, err := client.GetTrigger(context.Background(), obj) + if err == nil { + return fmt.Errorf("google_eventarc_trigger still exists %v", obj) + } + } + return nil + } +} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app.go.tmpl new file mode 100644 index 000000000000..81b39b144fb3 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app.go.tmpl @@ -0,0 +1,49 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleFirebaseAndroidApp() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirebaseAndroidApp().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "app_id") + + // Allow specifying a project + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleFirebaseAndroidAppRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleFirebaseAndroidAppRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + appId := d.Get("app_id") + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + name := fmt.Sprintf("projects/%s/androidApps/%s", project, appId.(string)) + d.SetId(name) + if err := d.Set("name", name); err != nil { + 
return fmt.Errorf("Error setting name: %s", err) + } + err = resourceFirebaseAndroidAppRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", name) + } + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config.go.tmpl new file mode 100644 index 000000000000..261a700a2a10 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config.go.tmpl @@ -0,0 +1,153 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} + +import ( + "context" + "fmt" + + "google.golang.org/api/firebase/v1beta1" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" + "github.com/hashicorp/terraform-provider-google/google/fwresource" +) + +// Ensure the implementation satisfies the expected interfaces +var ( + _ datasource.DataSource = &GoogleFirebaseAndroidAppConfigDataSource{} + _ datasource.DataSourceWithConfigure = &GoogleFirebaseAndroidAppConfigDataSource{} +) + +func NewGoogleFirebaseAndroidAppConfigDataSource() datasource.DataSource { + return &GoogleFirebaseAndroidAppConfigDataSource{} +} + +// GoogleFirebaseAndroidAppConfigDataSource defines the data source implementation +type GoogleFirebaseAndroidAppConfigDataSource struct { + client *firebase.Service + project types.String +} + +type GoogleFirebaseAndroidAppConfigModel struct { + Id types.String `tfsdk:"id"` + AppId types.String `tfsdk:"app_id"` + ConfigFilename types.String `tfsdk:"config_filename"` + ConfigFileContents 
types.String `tfsdk:"config_file_contents"` + Project types.String `tfsdk:"project"` +} + +func (d *GoogleFirebaseAndroidAppConfigDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_firebase_android_app_config" +} + +func (d *GoogleFirebaseAndroidAppConfigDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + // This description is used by the documentation generator and the language server. + MarkdownDescription: "A Google Cloud Firebase Android application configuration", + + Attributes: map[string]schema.Attribute{ + "app_id": schema.StringAttribute{ + Description: "The id of the Firebase Android App.", + MarkdownDescription: "The id of the Firebase Android App.", + Required: true, + }, + + "project": schema.StringAttribute{ + Description: "The project id of the Firebase Android App.", + MarkdownDescription: "The project id of the Firebase Android App.", + Optional: true, + }, + + "config_filename": schema.StringAttribute{ + Description: "The filename that the configuration artifact for the AndroidApp is typically saved as.", + MarkdownDescription: "The filename that the configuration artifact for the AndroidApp is typically saved as.", + Computed: true, + }, + + "config_file_contents": schema.StringAttribute{ + Description: "The content of the XML configuration file as a base64-encoded string.", + MarkdownDescription: "The content of the XML configuration file as a base64-encoded string.", + Computed: true, + }, + + "id": schema.StringAttribute{ + Description: "Firebase Android App Config identifier", + MarkdownDescription: "Firebase Android App Config identifier", + Computed: true, + }, + }, + } +} + +func (d *GoogleFirebaseAndroidAppConfigDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the 
provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*fwtransport.FrameworkProviderConfig) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *fwtransport.FrameworkProviderConfig, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = p.NewFirebaseClient(p.UserAgent, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.project = p.Project +} + +func (d *GoogleFirebaseAndroidAppConfigDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data GoogleFirebaseAndroidAppConfigModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + + client := firebase.NewProjectsAndroidAppsService(d.client) + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) 
+ if resp.Diagnostics.HasError() { + return + } + + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + appName := fmt.Sprintf("projects/%s/androidApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) + data.Id = types.StringValue(appName) + + clientResp, err := client.GetConfig(appName).Do() + if err != nil { + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAndroidAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + tflog.Trace(ctx, "read firebase android app config data source") + + data.ConfigFilename = types.StringValue(clientResp.ConfigFilename) + data.ConfigFileContents = types.StringValue(clientResp.ConfigFileContents) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config_test.go.tmpl new file mode 100644 index 000000000000..722c12cf6514 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_config_test.go.tmpl @@ -0,0 +1,70 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccDataSourceGoogleFirebaseAndroidAppConfig(t *testing.T) { + t.Parallel() + // Framework-based resources and datasources don't work with VCR yet + acctest.SkipIfVcr(t) + + context := map[string]interface{}{ + "project_id": 
envvar.GetTestProjectFromEnv(), + "package_name": "android.app." + acctest.RandString(t, 5), + "display_name": "tf-test Display Name AndroidAppConfig DataSource", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleFirebaseAndroidAppConfig(context), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceFirebaseAndroidAppConfigCheck("data.google_firebase_android_app_config.my_app_config"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseAndroidAppConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_android_app" "my_app_config" { + project = "%{project_id}" + package_name = "%{package_name}" + display_name = "%{display_name}" +} + +data "google_firebase_android_app_config" "my_app_config" { + project = "%{project_id}" + app_id = google_firebase_android_app.my_app_config.app_id +} +`, context) +} + +func testAccDataSourceFirebaseAndroidAppConfigCheck(datasourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[datasourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", datasourceName) + } + + if ds.Primary.Attributes["config_filename"] == "" { + return fmt.Errorf("config filename not found in data source") + } + + if ds.Primary.Attributes["config_file_contents"] == "" { + return fmt.Errorf("config file contents not found in data source") + } + + return nil + } +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_test.go.tmpl new file mode 100644 index 000000000000..d39e384354d1 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_android_app_test.go.tmpl @@ -0,0 +1,62 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceGoogleFirebaseAndroidApp(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "package_name": "android.package.app" + acctest.RandString(t, 4), + "display_name": "tf-test Display Name AndroidApp DataSource", + } + + resourceName := "data.google_firebase_android_app.my_app" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleFirebaseAndroidApp(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + resourceName, + "google_firebase_android_app.my_app", + map[string]struct{}{ + "deletion_policy": {}, + }, + ), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseAndroidApp(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_android_app" "my_app" { + project = "%{project_id}" + package_name = "%{package_name}" + display_name = "%{display_name}" + sha1_hashes = ["2145bdf698b8715039bd0e83f2069bed435ac21c"] + sha256_hashes = ["2145bdf698b8715039bd0e83f2069bed435ac21ca1b2c3d4e5f6123456789abc"] +} + +data "google_firebase_android_app" "my_app" { + app_id = google_firebase_android_app.my_app.app_id +} + +data "google_firebase_android_app" "my_app_project" { + project = "%{project_id}" + app_id = google_firebase_android_app.my_app.app_id +} +`, context) +} +{{- end }} diff --git 
a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app.go.tmpl new file mode 100644 index 000000000000..54722f76da17 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app.go.tmpl @@ -0,0 +1,49 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleFirebaseAppleApp() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirebaseAppleApp().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "app_id") + + // Allow specifying a project + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleFirebaseAppleAppRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleFirebaseAppleAppRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + appId := d.Get("app_id") + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + name := fmt.Sprintf("projects/%s/iosApps/%s", project, appId.(string)) + d.SetId(name) + if err := d.Set("name", name); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + err = resourceFirebaseAppleAppRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", name) + } + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config.go.tmpl 
b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config.go.tmpl new file mode 100644 index 000000000000..f7f5b137fafb --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config.go.tmpl @@ -0,0 +1,153 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} + +import ( + "context" + "fmt" + + "google.golang.org/api/firebase/v1beta1" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" +) + +// Ensure the implementation satisfies the expected interfaces +var ( + _ datasource.DataSource = &GoogleFirebaseAppleAppConfigDataSource{} + _ datasource.DataSourceWithConfigure = &GoogleFirebaseAppleAppConfigDataSource{} +) + +func NewGoogleFirebaseAppleAppConfigDataSource() datasource.DataSource { + return &GoogleFirebaseAppleAppConfigDataSource{} +} + +// GoogleFirebaseAppleAppConfigDataSource defines the data source implementation +type GoogleFirebaseAppleAppConfigDataSource struct { + client *firebase.Service + project types.String +} + +type GoogleFirebaseAppleAppConfigModel struct { + Id types.String `tfsdk:"id"` + AppId types.String `tfsdk:"app_id"` + ConfigFilename types.String `tfsdk:"config_filename"` + ConfigFileContents types.String `tfsdk:"config_file_contents"` + Project types.String `tfsdk:"project"` +} + +func (d *GoogleFirebaseAppleAppConfigDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_firebase_apple_app_config" +} + +func (d *GoogleFirebaseAppleAppConfigDataSource) 
Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + // This description is used by the documentation generator and the language server. + MarkdownDescription: "A Google Cloud Firebase Apple application configuration", + + Attributes: map[string]schema.Attribute{ + "app_id": schema.StringAttribute{ + Description: "The id of the Firebase iOS App.", + MarkdownDescription: "The id of the Firebase iOS App.", + Required: true, + }, + + "project": schema.StringAttribute{ + Description: "The project id of the Firebase iOS App.", + MarkdownDescription: "The project id of the Firebase iOS App.", + Optional: true, + }, + + "config_filename": schema.StringAttribute{ + Description: "The filename that the configuration artifact for the IosApp is typically saved as.", + MarkdownDescription: "The filename that the configuration artifact for the IosApp is typically saved as.", + Computed: true, + }, + + "config_file_contents": schema.StringAttribute{ + Description: "The content of the XML configuration file as a base64-encoded string.", + MarkdownDescription: "The content of the XML configuration file as a base64-encoded string.", + Computed: true, + }, + + "id": schema.StringAttribute{ + Description: "Firebase Apple App Config identifier", + MarkdownDescription: "Firebase Apple App Config identifier", + Computed: true, + }, + }, + } +} + +func (d *GoogleFirebaseAppleAppConfigDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*fwtransport.FrameworkProviderConfig) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *fwtransport.FrameworkProviderConfig, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = p.NewFirebaseClient(p.UserAgent, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.project = p.Project +} + +func (d *GoogleFirebaseAppleAppConfigDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data GoogleFirebaseAppleAppConfigModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) + if resp.Diagnostics.HasError() { + return + } + + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + + client := firebase.NewProjectsIosAppsService(d.client) + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + appName := fmt.Sprintf("projects/%s/iosApps/%s/config", data.Project.ValueString(), data.AppId.ValueString()) + data.Id = types.StringValue(appName) + + clientResp, err := client.GetConfig(appName).Do() + if err != nil { + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseAppleAppConfig %q", data.AppId.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + tflog.Trace(ctx, "read firebase apple app config data source") + + data.ConfigFilename = types.StringValue(clientResp.ConfigFilename) + data.ConfigFileContents = types.StringValue(clientResp.ConfigFileContents) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config_test.go.tmpl new file mode 100644 index 000000000000..69b52667f13b --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_config_test.go.tmpl @@ -0,0 +1,85 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccDataSourceGoogleFirebaseAppleAppConfig(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14158 + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "bundle_id": "apple.app." 
+ acctest.RandString(t, 5), + "display_name": "tf-test Display Name AppleAppConfig DataSource", + "app_store_id": 12345, + "team_id": 1234567890, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "4.58.0", + Source: "hashicorp/google{{- if ne $.TargetVersionName "ga" -}}-{{$.TargetVersionName}}{{- end }}", + }, + }, + Config: testAccDataSourceGoogleFirebaseAppleAppConfig(context), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceFirebaseAppleAppConfigCheck("data.google_firebase_apple_app_config.my_app_config"), + ), + }, + { + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Config: testAccDataSourceGoogleFirebaseAppleAppConfig(context), + Check: resource.ComposeTestCheckFunc( + testAccDataSourceFirebaseAppleAppConfigCheck("data.google_firebase_apple_app_config.my_app_config"), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseAppleAppConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_apple_app" "my_app_config" { + project = "%{project_id}" + bundle_id = "%{bundle_id}" + display_name = "%{display_name}" + app_store_id = "%{app_store_id}" + team_id = "%{team_id}" +} + +data "google_firebase_apple_app_config" "my_app_config" { + app_id = google_firebase_apple_app.my_app_config.app_id +} +`, context) +} + +func testAccDataSourceFirebaseAppleAppConfigCheck(datasourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + ds, ok := s.RootModule().Resources[datasourceName] + if !ok { + return fmt.Errorf("root module has no resource called %s", datasourceName) + } + + if ds.Primary.Attributes["config_filename"] == "" { + return fmt.Errorf("config filename not found in data source") + } + + if ds.Primary.Attributes["config_file_contents"] == "" { + return fmt.Errorf("config file contents 
not found in data source") + } + + return nil + } +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_test.go.tmpl new file mode 100644 index 000000000000..c7eef1095932 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_apple_app_test.go.tmpl @@ -0,0 +1,64 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceGoogleFirebaseAppleApp(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "bundle_id": "apple.app." + acctest.RandString(t, 5), + "display_name": "tf-test Display Name AppleApp DataSource", + "app_store_id": 12345, + "team_id": 1234567890, + } + + resourceName := "data.google_firebase_apple_app.my_app" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleFirebaseAppleApp(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + resourceName, + "google_firebase_apple_app.my_app", + map[string]struct{}{ + "deletion_policy": {}, + }, + ), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseAppleApp(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_apple_app" "my_app" { + project = "%{project_id}" + bundle_id = "%{bundle_id}" + display_name = "%{display_name}" + app_store_id = "%{app_store_id}" + team_id = "%{team_id}" +} + +data "google_firebase_apple_app" 
"my_app" { + app_id = google_firebase_apple_app.my_app.app_id +} + +data "google_firebase_apple_app" "my_app_project" { + project = "%{project_id}" + app_id = google_firebase_apple_app.my_app.app_id +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app.go.tmpl new file mode 100644 index 000000000000..2b6154e14e57 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app.go.tmpl @@ -0,0 +1,49 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleFirebaseWebApp() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirebaseWebApp().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "app_id") + + // Allow specifying a project + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleFirebaseWebAppRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleFirebaseWebAppRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + appId := d.Get("app_id") + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + name := fmt.Sprintf("projects/%s/webApps/%s", project, appId.(string)) + d.SetId(name) + if err := d.Set("name", name); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + err = resourceFirebaseWebAppRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", name) + } + return nil 
+} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_config.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_config.go.tmpl new file mode 100644 index 000000000000..7479139af4f4 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_config.go.tmpl @@ -0,0 +1,205 @@ +package firebase +{{- if ne $.TargetVersionName "ga" }} + +import ( + "context" + "fmt" + + "google.golang.org/api/firebase/v1beta1" + + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" + + "github.com/hashicorp/terraform-provider-google/google/fwmodels" + "github.com/hashicorp/terraform-provider-google/google/fwresource" + "github.com/hashicorp/terraform-provider-google/google/fwtransport" +) + +// Ensure the implementation satisfies the expected interfaces +var ( + _ datasource.DataSource = &GoogleFirebaseWebAppConfigDataSource{} + _ datasource.DataSourceWithConfigure = &GoogleFirebaseWebAppConfigDataSource{} +) + +func NewGoogleFirebaseWebAppConfigDataSource() datasource.DataSource { + return &GoogleFirebaseWebAppConfigDataSource{} +} + +// GoogleFirebaseWebAppConfigDataSource defines the data source implementation +type GoogleFirebaseWebAppConfigDataSource struct { + client *firebase.Service + project types.String +} + +type GoogleFirebaseWebAppConfigModel struct { + Id types.String `tfsdk:"id"` + WebAppId types.String `tfsdk:"web_app_id"` + ApiKey types.String `tfsdk:"api_key"` + AuthDomain types.String `tfsdk:"auth_domain"` + DatabaseUrl types.String `tfsdk:"database_url"` + LocationId types.String `tfsdk:"location_id"` + MeasurementId types.String `tfsdk:"measurement_id"` + MessagingSenderId types.String `tfsdk:"messaging_sender_id"` + StorageBucket 
types.String `tfsdk:"storage_bucket"` + Project types.String `tfsdk:"project"` +} + +func (d *GoogleFirebaseWebAppConfigDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_firebase_web_app_config" +} + +func (d *GoogleFirebaseWebAppConfigDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + // This description is used by the documentation generator and the language server. + MarkdownDescription: "A Google Cloud Firebase web application configuration", + + Attributes: map[string]schema.Attribute{ + "web_app_id": schema.StringAttribute{ + Description: "The id of the Firebase web App.", + MarkdownDescription: "The id of the Firebase web App.", + Required: true, + }, + + "project": schema.StringAttribute{ + Description: "The project id of the Firebase web App.", + MarkdownDescription: "The project id of the Firebase web App.", + Optional: true, + }, + + "api_key": schema.StringAttribute{ + Description: "The API key associated with the web App.", + MarkdownDescription: "The API key associated with the web App.", + Computed: true, + }, + + "auth_domain": schema.StringAttribute{ + Description: "The domain Firebase Auth configures for OAuth redirects, in the format `projectId.firebaseapp.com`", + MarkdownDescription: "The domain Firebase Auth configures for OAuth redirects, in the format `projectId.firebaseapp.com`", + Computed: true, + }, + + "database_url": schema.StringAttribute{ + Description: "The default Firebase Realtime Database URL.", + MarkdownDescription: "The default Firebase Realtime Database URL.", + Computed: true, + }, + + "location_id": schema.StringAttribute{ + Description: "The ID of the project's default GCP resource location. The location is one of the available GCP resource locations. 
" + + "This field is omitted if the default GCP resource location has not been finalized yet. To set your project's " + + "default GCP resource location, call defaultLocation.finalize after you add Firebase services to your project.", + MarkdownDescription: "The ID of the project's default GCP resource location. The location is one of the available GCP resource locations. " + + "This field is omitted if the default GCP resource location has not been finalized yet. To set your project's " + + "default GCP resource location, call defaultLocation.finalize after you add Firebase services to your project.", + Computed: true, + }, + + "measurement_id": schema.StringAttribute{ + Description: "The unique Google-assigned identifier of the Google Analytics web stream associated with the Firebase Web App. " + + "Firebase SDKs use this ID to interact with Google Analytics APIs. " + + "This field is only present if the App is linked to a web stream in a Google Analytics App + Web property. " + + "Learn more about this ID and Google Analytics web streams in the Analytics documentation. " + + "To generate a measurementId and link the Web App with a Google Analytics web stream, call projects.addGoogleAnalytics.", + MarkdownDescription: "The unique Google-assigned identifier of the Google Analytics web stream associated with the Firebase Web App. " + + "Firebase SDKs use this ID to interact with Google Analytics APIs. " + + "This field is only present if the App is linked to a web stream in a Google Analytics App + Web property. " + + "Learn more about this ID and Google Analytics web streams in the Analytics documentation. 
" + + "To generate a measurementId and link the Web App with a Google Analytics web stream, call projects.addGoogleAnalytics.", + Computed: true, + }, + + "messaging_sender_id": schema.StringAttribute{ + Description: "The sender ID for use with Firebase Cloud Messaging.", + MarkdownDescription: "The sender ID for use with Firebase Cloud Messaging.", + Computed: true, + }, + + "storage_bucket": schema.StringAttribute{ + Description: "The default Cloud Storage for Firebase storage bucket name.", + MarkdownDescription: "The default Cloud Storage for Firebase storage bucket name.", + Computed: true, + }, + + "id": schema.StringAttribute{ + Description: "Firebase Web App Config identifier", + MarkdownDescription: "Firebase Web App Config identifier", + Computed: true, + }, + }, + } +} + +func (d *GoogleFirebaseWebAppConfigDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + p, ok := req.ProviderData.(*fwtransport.FrameworkProviderConfig) + if !ok { + resp.Diagnostics.AddError( + "Unexpected Data Source Configure Type", + fmt.Sprintf("Expected *fwtransport.FrameworkProviderConfig, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + return + } + + d.client = p.NewFirebaseClient(p.UserAgent, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + d.project = p.Project +} + +func (d *GoogleFirebaseWebAppConfigDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data GoogleFirebaseWebAppConfigModel + var metaData *fwmodels.ProviderMetaModel + + // Read Provider meta into the meta model + resp.Diagnostics.Append(req.ProviderMeta.Get(ctx, &metaData)...) 
+ if resp.Diagnostics.HasError() { + return + } + + d.client.UserAgent = fwtransport.GenerateFrameworkUserAgentString(metaData, d.client.UserAgent) + + client := firebase.NewProjectsWebAppsService(d.client) + + // Read Terraform configuration data into the model + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + data.Project = fwresource.GetProjectFramework(data.Project, d.project, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + appName := fmt.Sprintf("projects/%s/webApps/%s/config", data.Project.ValueString(), data.WebAppId.ValueString()) + data.Id = data.WebAppId + + clientResp, err := client.GetConfig(appName).Do() + if err != nil { + fwtransport.HandleDatasourceNotFoundError(ctx, err, &resp.State, fmt.Sprintf("dataSourceFirebaseWebAppConfig %q", data.WebAppId.ValueString()), &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + } + + tflog.Trace(ctx, "read firebase web app config data source") + + data.ApiKey = types.StringValue(clientResp.ApiKey) + data.AuthDomain = types.StringValue(clientResp.AuthDomain) + data.DatabaseUrl = types.StringValue(clientResp.DatabaseURL) + data.LocationId = types.StringValue(clientResp.LocationId) + data.MeasurementId = types.StringValue(clientResp.MeasurementId) + data.MessagingSenderId = types.StringValue(clientResp.MessagingSenderId) + data.StorageBucket = types.StringValue(clientResp.StorageBucket) + + // Save data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_test.go.tmpl new file mode 100644 index 000000000000..19be22fac1df --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/data_source_google_firebase_web_app_test.go.tmpl @@ -0,0 +1,58 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceGoogleFirebaseWebApp(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "display_name": "tf_test Display Name WebApp DataSource", + } + + resourceName := "data.google_firebase_web_app.my_app" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleFirebaseWebApp(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceStateWithIgnores( + resourceName, + "google_firebase_web_app.my_app", + map[string]struct{}{ + "deletion_policy": {}, + }, + ), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseWebApp(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_web_app" "my_app" { + project = "%{project_id}" + display_name = "%{display_name}" +} + +data "google_firebase_web_app" "my_app" { + app_id = google_firebase_web_app.my_app.app_id +} + +data "google_firebase_web_app" "my_app_project" { + project = "%{project_id}" + app_id = google_firebase_web_app.my_app.app_id +} +`, context) +} +{{- end }} diff --git 
a/mmv1/third_party/terraform/services/firebase/go/resource_firebase_android_app_update_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_android_app_update_test.go.tmpl new file mode 100644 index 000000000000..170445fc6f4f --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_android_app_update_test.go.tmpl @@ -0,0 +1,85 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseAndroidApp_update(t *testing.T) { + t.Parallel() + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "package_name": "android.package.app" + acctest.RandString(t, 4), + "random_suffix": acctest.RandString(t, 10), + "display_name": "tf-test Display Name N", + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAndroidApp(context, "", "key1"), + }, + { + Config: testAccFirebaseAndroidApp(context, "2", "key2"), + }, + }, + }) +} + +func testAccFirebaseAndroidApp(context map[string]interface{}, update string, apiKeyLabel string) string { + context["display_name"] = context["display_name"].(string) + update + context["api_key_label"] = apiKeyLabel + return acctest.Nprintf(` +resource "google_firebase_android_app" "update" { + provider = google-beta + project = "%{project_id}" + + package_name = "%{package_name}" + display_name = "%{display_name} %{random_suffix}" + sha1_hashes = ["2145bdf698b8715039bd0e83f2069bed435ac21c"] + sha256_hashes = ["2145bdf698b8715039bd0e83f2069bed435ac21ca1b2c3d4e5f6123456789abc"] + api_key_id = google_apikeys_key.%{api_key_label}.uid +} + +resource 
"google_apikeys_key" "key1" { + provider = google-beta + project = "%{project_id}" + + name = "tf-test-api-key1%{random_suffix}" + display_name = "Test api key 1" + + restrictions { + android_key_restrictions { + allowed_applications { + package_name = "%{package_name}" + sha1_fingerprint = "2145bdf698b8715039bd0e83f2069bed435ac21c" + } + } + } +} + +resource "google_apikeys_key" "key2" { + provider = google-beta + project = "%{project_id}" + + name = "tf-test-api-key2%{random_suffix}" + display_name = "Test api key 2" + + restrictions { + android_key_restrictions { + allowed_applications { + package_name = "%{package_name}" + sha1_fingerprint = "2145bdf698b8715039bd0e83f2069bed435ac21c" + } + } + } +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/resource_firebase_apple_app_update_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_apple_app_update_test.go.tmpl new file mode 100644 index 000000000000..0dc1f4d4afdc --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_apple_app_update_test.go.tmpl @@ -0,0 +1,81 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseAppleApp_update(t *testing.T) { + t.Parallel() + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "bundle_id": "apple.app.12345", + "random_suffix": acctest.RandString(t, 10), + "display_name": "tf-test Display Name N", + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppleApp(context, 12345, "1", "key1"), + }, + { + Config: 
testAccFirebaseAppleApp(context, 67890, "2", "key2"), + }, + }, + }) +} + +func testAccFirebaseAppleApp(context map[string]interface{}, appStoreId int, delta string, apiKeyLabel string) string { + context["display_name"] = context["display_name"].(string) + delta + context["app_store_id"] = appStoreId + context["team_id"] = "123456789" + delta + context["api_key_label"] = apiKeyLabel + return acctest.Nprintf(` +resource "google_firebase_apple_app" "update" { + provider = google-beta + project = "%{project_id}" + + bundle_id = "%{bundle_id}" + display_name = "%{display_name} %{random_suffix}" + app_store_id = "%{app_store_id}" + team_id = "%{team_id}" + api_key_id = google_apikeys_key.%{api_key_label}.uid +} + +resource "google_apikeys_key" "key1" { + provider = google-beta + project = "%{project_id}" + + name = "tf-test-api-key1%{random_suffix}" + display_name = "Test api key 1" + + restrictions { + ios_key_restrictions { + allowed_bundle_ids = ["%{bundle_id}"] + } + } +} + +resource "google_apikeys_key" "key2" { + provider = google-beta + project = "%{project_id}" + + name = "tf-test-api-key2%{random_suffix}" + display_name = "Test api key 2" + + restrictions { + ios_key_restrictions { + allowed_bundle_ids = ["%{bundle_id}"] + } + } +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/resource_firebase_project_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_project_test.go.tmpl new file mode 100644 index 000000000000..e73d9ff08d5b --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_project_test.go.tmpl @@ -0,0 +1,58 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseProject_destroyAndReapply(t 
*testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseProject_firebaseProjectBasicExample(context), + }, + { + Config: testAccFirebaseProject_firebaseProjectBasicExampleDestroyed(context), + }, + { + Config: testAccFirebaseProject_firebaseProjectBasicExample(context), + }, + { + ResourceName: "google_firebase_project.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccFirebaseProject_firebaseProjectBasicExampleDestroyed(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "default" { + provider = google-beta + + project_id = "tf-test-my-project%{random_suffix}" + name = "tf-test-my-project%{random_suffix}" + org_id = "%{org_id}" + + labels = { + "firebase" = "enabled" + } +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/firebase/go/resource_firebase_web_app_test.go.tmpl b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_web_app_test.go.tmpl new file mode 100644 index 000000000000..ad9c9fd4bdc9 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebase/go/resource_firebase_web_app_test.go.tmpl @@ -0,0 +1,186 @@ +package firebase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccFirebaseWebApp_firebaseWebAppFull(t *testing.T) { + // TODO: https://github.com/hashicorp/terraform-provider-google/issues/14158 + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "display_name": "tf-test Display Name N", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseWebApp_firebaseWebAppFull(context, "", "key1"), + }, + { + Config: testAccFirebaseWebApp_firebaseWebAppFull(context, "2", "key2"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "api_key"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "auth_domain"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "storage_bucket"), + ), + }, + { + Config: testAccFirebaseWebApp_firebaseWebAppFull(context, "", "key1"), + }, + { + Config: testAccFirebaseWebApp_firebaseWebAppFull(context, "2", "key2"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "api_key"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "auth_domain"), + resource.TestCheckResourceAttrSet("data.google_firebase_web_app_config.default", "storage_bucket"), + ), + }, + }, + }) +} + +func testAccFirebaseWebApp_firebaseWebAppFull(context map[string]interface{}, update string, apiKeyLabel string) string { + context["display_name"] = context["display_name"].(string) + update + context["api_key_label"] = apiKeyLabel + return acctest.Nprintf(` +resource "google_apikeys_key" "key1" { + provider = google-beta + name = 
"tf-test-api-key1%{random_suffix}" + display_name = "Test api key 1" + project = "%{project_id}" +} + +resource "google_apikeys_key" "key2" { + provider = google-beta + name = "tf-test-api-key2%{random_suffix}" + display_name = "Test api key 2" + project = "%{project_id}" +} + +resource "google_firebase_web_app" "default" { + provider = google-beta + project = "%{project_id}" + display_name = "%{display_name} %{random_suffix}" + api_key_id = google_apikeys_key.%{api_key_label}.uid + deletion_policy = "DELETE" +} + +data "google_firebase_web_app_config" "default" { + provider = google-beta + web_app_id = google_firebase_web_app.default.app_id +} +`, context) +} + +func TestAccFirebaseWebApp_firebaseWebAppSkipDelete(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + "display_name": "tf-test Display Name N", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseWebAppNotDestroyedProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseWebApp_firebaseWebAppSkipDelete(context, ""), + }, + { + ResourceName: "google_firebase_web_app.skip_delete", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_policy", "project"}, + }, + }, + }) +} + +func testAccFirebaseWebApp_firebaseWebAppSkipDelete(context map[string]interface{}, update string) string { + // Create a new project so we can clean up the project entirely + return acctest.Nprintf(` +resource "google_project" "default" { + provider = google-beta + + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + labels = { + "firebase" = "enabled" + } +} + +resource "google_firebase_project" "default" { + provider = google-beta + project = google_project.default.project_id +} + 
+resource "google_firebase_web_app" "skip_delete" { + provider = google-beta + project = google_firebase_project.default.project + display_name = "%{display_name} %{random_suffix}" + deletion_policy = "ABANDON" +} +`, context) +} + +func testAccCheckFirebaseWebAppNotDestroyedProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_firebase_web_app" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}FirebaseBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err != nil { + return fmt.Errorf("FirebaseWebApp doesn't exists at %s", url) + } + } + + return nil + } +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_app_attest_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_app_attest_config_test.go.tmpl new file mode 100644 index 000000000000..75df876e7293 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_app_attest_config_test.go.tmpl @@ -0,0 +1,61 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckAppAttestConfig_firebaseAppCheckAppAttestConfigUpdate(t *testing.T) 
{ + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "team_id": "9987654321", + "random_suffix": acctest.RandString(t, 10), + "token_ttl": "7200s", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckAppAttestConfig_firebaseAppCheckAppAttestConfigMinimalExample(context), + }, + { + ResourceName: "google_firebase_app_check_app_attest_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + { + Config: testAccFirebaseAppCheckAppAttestConfig_firebaseAppCheckAppAttestConfigFullExample(context), + }, + { + ResourceName: "google_firebase_app_check_app_attest_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + { + Config: testAccFirebaseAppCheckAppAttestConfig_firebaseAppCheckAppAttestConfigMinimalExample(context), + }, + { + ResourceName: "google_firebase_app_check_app_attest_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + }, + }) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_debug_token_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_debug_token_test.go.tmpl new file mode 100644 index 000000000000..bd8a2fd50cf2 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_debug_token_test.go.tmpl @@ -0,0 +1,87 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + 
"github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckDebugToken_firebaseAppCheckDebugTokenUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "display_name": "Debug Token 1", + "token": "5E728315-E121-467F-BCA1-1FE71130BB98", + } + + contextUpdated := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "display_name": "Debug Token 2", + "token": "5E728315-E121-467F-BCA1-1FE71130BB98", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + CheckDestroy: testAccCheckFirebaseAppCheckDebugTokenDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckDebugToken_firebaseAppCheckDebugTokenTemplate(context), + }, + { + ResourceName: "google_firebase_app_check_debug_token.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"token", "app_id"}, + }, + { + Config: testAccFirebaseAppCheckDebugToken_firebaseAppCheckDebugTokenTemplate(contextUpdated), + }, + { + ResourceName: "google_firebase_app_check_debug_token.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"token", "app_id"}, + }, + }, + }) +} + +func testAccFirebaseAppCheckDebugToken_firebaseAppCheckDebugTokenTemplate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_web_app" "default" { + provider = google-beta + + project = "%{project_id}" + display_name = "Web App for debug token" +} + +# It takes a while for App Check to recognize the new app +# If your app already exists, you don't have to wait 30 seconds. 
+resource "time_sleep" "wait_30s" { + depends_on = [google_firebase_web_app.default] + create_duration = "30s" +} + +resource "google_firebase_app_check_debug_token" "default" { + provider = google-beta + + project = "%{project_id}" + app_id = google_firebase_web_app.default.app_id + display_name = "%{display_name}" + token = "%{token}" + + depends_on = [time_sleep.wait_30s] +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_device_check_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_device_check_config_test.go.tmpl new file mode 100644 index 000000000000..1282e6dc5623 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_device_check_config_test.go.tmpl @@ -0,0 +1,62 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "team_id": "9987654321", + "private_key_path": "test-fixtures/private-key.p8", + "token_ttl": "3900s", + "random_suffix": acctest.RandString(t, 10), + } + + contextUpdated := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "team_id": "9987654321", + "private_key_path": "test-fixtures/private-key-2.p8", + "token_ttl": "7200s", + // Bundle ID needs to be the same between updates but different between tests + "random_suffix": context["random_suffix"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigFullExample(context), + }, + { + ResourceName: "google_firebase_app_check_device_check_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"private_key", "app_id"}, + }, + { + Config: testAccFirebaseAppCheckDeviceCheckConfig_firebaseAppCheckDeviceCheckConfigFullExample(contextUpdated), + }, + { + ResourceName: "google_firebase_app_check_device_check_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"private_key", "app_id"}, + }, + }, + }) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_play_integrity_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_play_integrity_config_test.go.tmpl new file mode 100644 index 000000000000..698c79f946a7 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_play_integrity_config_test.go.tmpl @@ -0,0 +1,60 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckPlayIntegrityConfig_firebaseAppCheckPlayIntegrityConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "token_ttl": "7200s", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckPlayIntegrityConfig_firebaseAppCheckPlayIntegrityConfigMinimalExample(context), + }, + { + ResourceName: "google_firebase_app_check_play_integrity_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + { + Config: testAccFirebaseAppCheckPlayIntegrityConfig_firebaseAppCheckPlayIntegrityConfigFullExample(context), + }, + { + ResourceName: "google_firebase_app_check_play_integrity_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + { + Config: testAccFirebaseAppCheckPlayIntegrityConfig_firebaseAppCheckPlayIntegrityConfigMinimalExample(context), + }, + { + ResourceName: "google_firebase_app_check_play_integrity_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + }, + }) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_enterprise_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_enterprise_config_test.go.tmpl new file mode 100644 index 000000000000..4b663c534a2a --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_enterprise_config_test.go.tmpl @@ -0,0 +1,59 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckRecaptchaEnterpriseConfig_firebaseAppCheckRecaptchaEnterpriseConfigUpdate(t *testing.T) { + 
t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "token_ttl": "7200s", + "site_key": "6LdpMXIpAAAAANkwWQPgEdjEhal7ugkH9RK9ytuw", + "random_suffix": acctest.RandString(t, 10), + } + + contextUpdated := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "token_ttl": "3800s", + "site_key": "7LdpMXIpAAAAANkwWQPgEdjEhal7ugkH9RK9ytuw", + "random_suffix": context["random_suffix"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckRecaptchaEnterpriseConfig_firebaseAppCheckRecaptchaEnterpriseConfigBasicExample(context), + }, + { + ResourceName: "google_firebase_app_check_recaptcha_enterprise_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + { + Config: testAccFirebaseAppCheckRecaptchaEnterpriseConfig_firebaseAppCheckRecaptchaEnterpriseConfigBasicExample(contextUpdated), + }, + { + ResourceName: "google_firebase_app_check_recaptcha_enterprise_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"app_id"}, + }, + }, + }) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_v3_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_v3_config_test.go.tmpl new file mode 100644 index 000000000000..045d890e5104 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_recaptcha_v3_config_test.go.tmpl @@ -0,0 +1,59 @@ +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckRecaptchaV3Config_firebaseAppCheckRecaptchaV3ConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "token_ttl": "7200s", + "site_secret": "6Lf9YnQpAAAAAC3-MHmdAllTbPwTZxpUw5d34YzX", + "random_suffix": acctest.RandString(t, 10), + } + + contextUpdated := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "token_ttl": "3800s", + "site_secret": "7Lf9YnQpAAAAAC3-MHmdAllTbPwTZxpUw5d34YzX", + "random_suffix": context["random_suffix"], + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "random": {}, + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckRecaptchaV3Config_firebaseAppCheckRecaptchaV3ConfigBasicExample(context), + }, + { + ResourceName: "google_firebase_app_check_recaptcha_v3_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_secret", "app_id"}, + }, + { + Config: testAccFirebaseAppCheckRecaptchaV3Config_firebaseAppCheckRecaptchaV3ConfigBasicExample(contextUpdated), + }, + { + ResourceName: "google_firebase_app_check_recaptcha_v3_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_secret", "app_id"}, + }, + }, + }) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_service_config_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_service_config_test.go.tmpl new file mode 100644 index 000000000000..abb7f2a65109 --- 
/dev/null +++ b/mmv1/third_party/terraform/services/firebaseappcheck/go/resource_firebase_app_check_service_config_test.go.tmpl @@ -0,0 +1,137 @@ +// Copyright (c) HashiCorp, Inc. +// SPDX-License-Identifier: MPL-2.0 +package firebaseappcheck_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseAppCheckServiceConfig_firebaseAppCheckServiceConfigUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseAppCheckServiceConfigDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirebaseAppCheckServiceConfig_firebaseAppCheckServiceConfigUpdate(context, "UNENFORCED"), + }, + { + ResourceName: "google_firebase_app_check_service_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_id"}, + }, + { + Config: testAccFirebaseAppCheckServiceConfig_firebaseAppCheckServiceConfigUpdate(context, "ENFORCED"), + }, + { + ResourceName: "google_firebase_app_check_service_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_id"}, + }, + { + Config: testAccFirebaseAppCheckServiceConfig_firebaseAppCheckServiceConfigUpdate(context, ""), + }, + { + ResourceName: "google_firebase_app_check_service_config.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"service_id"}, + }, + }, + }) +} + +func 
testAccFirebaseAppCheckServiceConfig_firebaseAppCheckServiceConfigUpdate(context map[string]interface{}, enforcementMode string) string { + context["enforcement_mode"] = enforcementMode + return acctest.Nprintf(` +resource "google_project" "default" { + provider = google-beta + project_id = "tf-test-appcheck%{random_suffix}" + name = "tf-test-appcheck%{random_suffix}" + org_id = "%{org_id}" + labels = { + "firebase" = "enabled" + } +} + +resource "google_project_service" "firebase" { + provider = google-beta + project = google_project.default.project_id + service = "firebase.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "database" { + provider = google-beta + project = google_project.default.project_id + service = "firebasedatabase.googleapis.com" + disable_on_destroy = false + depends_on = [ + google_project_service.firebase, + ] +} + +resource "google_project_service" "appcheck" { + provider = google-beta + project = google_project.default.project_id + service = "firebaseappcheck.googleapis.com" + disable_on_destroy = false + depends_on = [ + google_project_service.database, + ] +} + +resource "google_firebase_project" "default" { + provider = google-beta + project = google_project.default.project_id + + depends_on = [ + google_project_service.appcheck, + ] +} + +# It takes a while for the new project to be ready for a database +resource "time_sleep" "wait_30s" { + depends_on = [google_firebase_project.default] + create_duration = "30s" +} + +resource "google_firebase_database_instance" "default" { + provider = google-beta + project = google_firebase_project.default.project + region = "us-central1" + instance_id = "tf-test-appcheck%{random_suffix}-default-rtdb" + type = "DEFAULT_DATABASE" + + depends_on = [time_sleep.wait_30s] +} + +resource "google_firebase_app_check_service_config" "default" { + provider = google-beta + project = google_firebase_project.default.project + service_id = "firebasedatabase.googleapis.com" + 
enforcement_mode = "%{enforcement_mode}" + + depends_on = [google_firebase_database_instance.default] +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebasedatabase/go/resource_firebase_database_instance_test.go.tmpl b/mmv1/third_party/terraform/services/firebasedatabase/go/resource_firebase_database_instance_test.go.tmpl new file mode 100644 index 000000000000..524aafe661ca --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasedatabase/go/resource_firebase_database_instance_test.go.tmpl @@ -0,0 +1,74 @@ +package firebasedatabase_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func testAccFirebaseDatabaseInstance_firebaseDatabaseInstanceInState(context map[string]interface{}, state string) string { + context["desired_state"] = state + return acctest.Nprintf(` +resource "google_firebase_database_instance" "updated" { + provider = google-beta + project = "%{project_id}" + region = "%{region}" + instance_id = "tf-test-state-change-db%{random_suffix}" + desired_state = "%{desired_state}" +} +`, context) +} + +func TestAccFirebaseDatabaseInstance_firebaseDatabaseInstanceStateChange(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "region": envvar.GetTestRegionFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseDatabaseInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseDatabaseInstance_firebaseDatabaseInstanceInState(context, "ACTIVE"), + }, + { + ResourceName: 
"google_firebase_database_instance.updated", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region", "instance_id", "desired_state"}, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_firebase_database_instance.updated", "database_url"), + ), + }, + { + Config: testAccFirebaseDatabaseInstance_firebaseDatabaseInstanceInState(context, "DISABLED"), + }, + { + ResourceName: "google_firebase_database_instance.updated", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region", "instance_id", "desired_state"}, + }, + { + Config: testAccFirebaseDatabaseInstance_firebaseDatabaseInstanceInState(context, "ACTIVE"), + }, + { + ResourceName: "google_firebase_database_instance.updated", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region", "instance_id", "desired_state"}, + }, + }, + }) +} + +{{ end }} + diff --git a/mmv1/third_party/terraform/services/firebaseextensions/go/resource_firebase_extensions_instance_test.go.tmpl b/mmv1/third_party/terraform/services/firebaseextensions/go/resource_firebase_extensions_instance_test.go.tmpl new file mode 100644 index 000000000000..836db0b276d7 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebaseextensions/go/resource_firebase_extensions_instance_test.go.tmpl @@ -0,0 +1,149 @@ +package firebaseextensions_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccFirebaseExtensionsInstance_firebaseExtentionsInstanceResizeImageUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "location": "us-central1", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseExtensionsInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseExtensionsInstance_firebaseExtentionsInstanceResizeImageBefore(context), + }, + { + ResourceName: "google_firebase_extensions_instance.resize_image", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"instance_id"}, + }, + { + Config: testAccFirebaseExtensionsInstance_firebaseExtentionsInstanceResizeImageAfter(context), + }, + { + ResourceName: "google_firebase_extensions_instance.resize_image", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"instance_id"}, + }, + }, + }) +} + +func testAccFirebaseExtensionsInstance_firebaseExtentionsInstanceResizeImageBefore(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "images" { + provider = google-beta + project = "%{project_id}" + name = "tf-test-bucket-id%{random_suffix}" + location = "US" + uniform_bucket_level_access = true + + # Delete all objects when the bucket is deleted + force_destroy = true +} + +resource "google_firebase_extensions_instance" "resize_image" { + provider = google-beta + project = "%{project_id}" + instance_id = "tf-test-storage-resize-images%{random_suffix}" + config { + extension_ref = "firebase/storage-resize-images" + extension_version = "0.2.2" + + # The following params apply to the firebase/storage-resize-images extension. 
+ # Different extensions may have different params + params = { + DELETE_ORIGINAL_FILE = false + MAKE_PUBLIC = false + IMAGE_TYPE = "jpeg" + IS_ANIMATED = true + FUNCTION_MEMORY = 1024 + DO_BACKFILL = false + IMG_SIZES = "200x200" + IMG_BUCKET = google_storage_bucket.images.name + } + + system_params = { + "firebaseextensions.v1beta.function/location" = "%{location}" + "firebaseextensions.v1beta.function/maxInstances" = 3000 + "firebaseextensions.v1beta.function/minInstances" = 0 + "firebaseextensions.v1beta.function/vpcConnectorEgressSettings" = "VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED" + } + + allowed_event_types = [ + "firebase.extensions.storage-resize-images.v1.onCompletion" + ] + + eventarc_channel = "projects/%{project_id}/locations/%{location}/channels/firebase" + } +} +`, context) +} + +func testAccFirebaseExtensionsInstance_firebaseExtentionsInstanceResizeImageAfter(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_storage_bucket" "images" { + provider = google-beta + project = "%{project_id}" + name = "tf-test-bucket-id%{random_suffix}" + location = "US" + uniform_bucket_level_access = true + + # Delete all objects when the bucket is deleted + force_destroy = true +} + +resource "google_firebase_extensions_instance" "resize_image" { + provider = google-beta + project = "%{project_id}" + instance_id = "tf-test-storage-resize-images%{random_suffix}" + config { + extension_ref = "firebase/storage-resize-images" + extension_version = "0.2.2" + + # The following params apply to the firebase/storage-resize-images extension. 
+ # Different extensions may have different params + params = { + # Changed params + DELETE_ORIGINAL_FILE = true + MAKE_PUBLIC = true + IMAGE_TYPE = "jpeg" + IS_ANIMATED = true + FUNCTION_MEMORY = 512 + DO_BACKFILL = true + IMG_SIZES = "400x400" + IMG_BUCKET = google_storage_bucket.images.name + } + + system_params = { + "firebaseextensions.v1beta.function/location" = "%{location}" + # Changed params + "firebaseextensions.v1beta.function/maxInstances" = 100 + "firebaseextensions.v1beta.function/minInstances" = 0 + "firebaseextensions.v1beta.function/vpcConnectorEgressSettings" = "VPC_CONNECTOR_EGRESS_SETTINGS_UNSPECIFIED" + } + + # Disable events + } +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel.go.tmpl b/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel.go.tmpl new file mode 100644 index 000000000000..38f8479410db --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel.go.tmpl @@ -0,0 +1,47 @@ +package firebasehosting +{{- if ne $.TargetVersionName "ga" }} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleFirebaseHostingChannel() *schema.Resource { + // Generate datasource schema from resource + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceFirebaseHostingChannel().Schema) + + // Set 'Required' schema elements + tpgresource.AddRequiredFieldsToSchema(dsSchema, "site_id", "channel_id") + + return &schema.Resource{ + Read: dataSourceGoogleFirebaseHostingChannelRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleFirebaseHostingChannelRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) 
+ + id, err := tpgresource.ReplaceVars(d, config, "sites/{{"{{"}}site_id{{"}}"}}/channels/{{"{{"}}channel_id{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + err = resourceFirebaseHostingChannelRead(d, meta) + if err != nil { + return err + } + + if err := tpgresource.SetDataSourceLabels(d); err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel_test.go.tmpl b/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel_test.go.tmpl new file mode 100644 index 000000000000..0b7fa3cc6ebf --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasehosting/go/data_source_google_firebase_hosting_channel_test.go.tmpl @@ -0,0 +1,60 @@ +package firebasehosting_test +{{- if ne $.TargetVersionName "ga" }} +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceGoogleFirebaseHostingChannel(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceGoogleFirebaseHostingChannel(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState( + "data.google_firebase_hosting_channel.channel", + "google_firebase_hosting_channel.channel", + ), + ), + }, + }, + }) +} + +func testAccDataSourceGoogleFirebaseHostingChannel(context map[string]interface{}) string { + 
return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "channel" { + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel%{random_suffix}" + + labels = { + foo = "bar" + } +} + +data "google_firebase_hosting_channel" "channel" { + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel%{random_suffix}" + + depends_on = [google_firebase_hosting_channel.channel] +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_channel_test.go.tmpl b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_channel_test.go.tmpl new file mode 100644 index 000000000000..5be59c4fa773 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_channel_test.go.tmpl @@ -0,0 +1,196 @@ +package firebasehosting_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseHostingChannel_firebasehostingChannelUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseHostingChannelDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelBasic(context), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelTtl(context, "8600s"), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelTtl(context, "86400s"), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelRetainedReleaseCount(context, 30), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelRetainedReleaseCount(context, 20), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelLabels(context), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id", "labels", "terraform_labels"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelMultipleFields(context), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id", "labels", "terraform_labels"}, + }, + { + Config: testAccFirebaseHostingChannel_firebasehostingChannelBasic(context), + }, + { + ResourceName: "google_firebase_hosting_channel.update", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "site_id", "channel_id"}, + }, + }, + }) +} + +func testAccFirebaseHostingChannel_firebasehostingChannelBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "update" { + provider = google-beta + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel-update%{random_suffix}" +} +`, context) +} + +func testAccFirebaseHostingChannel_firebasehostingChannelTtl(context map[string]interface{}, ttl string) string { + context["ttl"] = ttl + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "update" { + provider = google-beta + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel-update%{random_suffix}" + ttl = "%{ttl}" +} +`, context) +} + +func testAccFirebaseHostingChannel_firebasehostingChannelRetainedReleaseCount(context map[string]interface{}, retainedReleaseCount int) string { + context["retained_release_count"] = retainedReleaseCount + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "update" { + provider = google-beta + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel-update%{random_suffix}" + ttl = "86400s" + retained_release_count = %{retained_release_count} +} +`, context) +} + +func testAccFirebaseHostingChannel_firebasehostingChannelLabels(context map[string]interface{}) string { 
+ return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "update" { + provider = google-beta + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel-update%{random_suffix}" + ttl = "86400s" + retained_release_count = 10 + labels = { + "some-key": "some-value" + } +} +`, context) +} + +func testAccFirebaseHostingChannel_firebasehostingChannelMultipleFields(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "default" { + provider = google-beta + project = "%{project_id}" + site_id = "tf-test-site-with-channel%{random_suffix}" +} + +resource "google_firebase_hosting_channel" "update" { + provider = google-beta + site_id = google_firebase_hosting_site.default.site_id + channel_id = "tf-test-channel-update%{random_suffix}" + ttl = "86400s" + retained_release_count = 40 + labels = { + "some-key-2": "some-value-2" + } +} +`, context) +} + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_custom_domain_test.go.tmpl b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_custom_domain_test.go.tmpl new file mode 100644 index 000000000000..e16581346c11 --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_custom_domain_test.go.tmpl @@ -0,0 +1,81 @@ +package firebasehosting_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseHostingCustomDomain_firebasehostingCustomdomainUpdate(t *testing.T) { + t.Parallel() + + context 
:= map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "site_id": envvar.GetTestProjectFromEnv(), + "custom_domain": "update.source.domain.com", + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseHostingCustomDomainDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseHostingCustomDomain_firebasehostingCustomdomainBeforeUpdate(context), + }, + { + ResourceName: "google_firebase_hosting_custom_domain.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id", "custom_domain", "wait_dns_verification"}, + }, + { + Config: testAccFirebaseHostingCustomDomain_firebasehostingCustomdomainAfterUpdate(context), + }, + { + ResourceName: "google_firebase_hosting_custom_domain.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id", "custom_domain", "wait_dns_verification"}, + }, + }, + }) +} + +func testAccFirebaseHostingCustomDomain_firebasehostingCustomdomainBeforeUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_custom_domain" "default" { + provider = google-beta + + project = "%{project_id}" + site_id = "%{site_id}" + custom_domain = "%{custom_domain}" + cert_preference = "GROUPED" + redirect_target = "destination.domain.com" + + wait_dns_verification = false +} +`, context) +} + +func testAccFirebaseHostingCustomDomain_firebasehostingCustomdomainAfterUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_custom_domain" "default" { + provider = google-beta + + project = "%{project_id}" + site_id = "%{site_id}" + custom_domain = "%{custom_domain}" + cert_preference = "PROJECT_GROUPED" + redirect_target = "destination2.domain.com" 
+ + wait_dns_verification = false +} +`, context) +} + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_site_test.go.tmpl b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_site_test.go.tmpl new file mode 100644 index 000000000000..cc0e77f6f4ee --- /dev/null +++ b/mmv1/third_party/terraform/services/firebasehosting/go/resource_firebase_hosting_site_test.go.tmpl @@ -0,0 +1,128 @@ +package firebasehosting_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirebaseHostingSite_firebasehostingSiteUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "site_id": "tf-test-site-update-app", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseHostingSiteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseHostingSite_firebasehostingSiteBeforeUpdate(context), + }, + { + ResourceName: "google_firebase_hosting_site.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id"}, + }, + { + Config: testAccFirebaseHostingSite_firebasehostingSiteAfterUpdate(context), + }, + { + ResourceName: "google_firebase_hosting_site.update", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id"}, + }, + }, + }) +} + +func TestAccFirebaseHostingSite_firebasehostingSiteUpsert(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": 
envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "site_id": "tf-test-site-upsert", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseHostingSiteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseHostingSite_firebasehostingSiteUpsert(context), + }, + { + ResourceName: "google_firebase_hosting_site.create2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id"}, + }, + }, + }) +} + + +func testAccFirebaseHostingSite_firebasehostingSiteBeforeUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_web_app" "before" { + provider = google-beta + project = "%{project_id}" + display_name = "tf-test Test web app before for Firebase Hosting" +} + +resource "google_firebase_hosting_site" "update" { + provider = google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" + app_id = google_firebase_web_app.before.app_id +} +`, context) +} + +func testAccFirebaseHostingSite_firebasehostingSiteAfterUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_web_app" "after" { + provider = google-beta + project = "%{project_id}" + display_name = "tf-test Test web app after for Firebase Hosting" +} + +resource "google_firebase_hosting_site" "update" { + provider = google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" + app_id = google_firebase_web_app.after.app_id +} +`, context) +} + +func testAccFirebaseHostingSite_firebasehostingSiteUpsert(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "create" { + provider = google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" +} + +resource "google_firebase_hosting_site" 
"create2" { + provider = google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" + + depends_on = [google_firebase_hosting_site.create] +} +`, context) +} + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firebasehosting/resource_firebase_hosting_site_test.go.erb b/mmv1/third_party/terraform/services/firebasehosting/resource_firebase_hosting_site_test.go.erb index c12e002acaa2..57548470be69 100644 --- a/mmv1/third_party/terraform/services/firebasehosting/resource_firebase_hosting_site_test.go.erb +++ b/mmv1/third_party/terraform/services/firebasehosting/resource_firebase_hosting_site_test.go.erb @@ -46,6 +46,34 @@ func TestAccFirebaseHostingSite_firebasehostingSiteUpdate(t *testing.T) { }) } +func TestAccFirebaseHostingSite_firebasehostingSiteUpsert(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project_id": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "site_id": "tf-test-site-upsert", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckFirebaseHostingSiteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccFirebaseHostingSite_firebasehostingSiteUpsert(context), + }, + { + ResourceName: "google_firebase_hosting_site.create2", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"site_id"}, + }, + }, + }) +} + + func testAccFirebaseHostingSite_firebasehostingSiteBeforeUpdate(context map[string]interface{}) string { return acctest.Nprintf(` resource "google_firebase_web_app" "before" { @@ -80,4 +108,22 @@ resource "google_firebase_hosting_site" "update" { `, context) } +func testAccFirebaseHostingSite_firebasehostingSiteUpsert(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_firebase_hosting_site" "create" { + provider 
= google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" +} + +resource "google_firebase_hosting_site" "create2" { + provider = google-beta + project = "%{project_id}" + site_id = "%{site_id}%{random_suffix}" + + depends_on = [google_firebase_hosting_site.create] +} +`, context) +} + <% end -%> \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/firestore/go/resource_firestore_database_update_test.go b/mmv1/third_party/terraform/services/firestore/go/resource_firestore_database_update_test.go new file mode 100644 index 000000000000..3477c4a19a73 --- /dev/null +++ b/mmv1/third_party/terraform/services/firestore/go/resource_firestore_database_update_test.go @@ -0,0 +1,151 @@ +package firestore_test + +import ( + "fmt" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccFirestoreDatabase_updateConcurrencyMode(t *testing.T) { + t.Parallel() + + projectId := envvar.GetTestProjectFromEnv() + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "OPTIMISTIC"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + { + Config: testAccFirestoreDatabase_concurrencyMode(projectId, randomSuffix, "PESSIMISTIC"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + }, + }) +} + +func 
TestAccFirestoreDatabase_updatePitrEnablement(t *testing.T) { + t.Parallel() + + projectId := envvar.GetTestProjectFromEnv() + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_ENABLED"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + { + Config: testAccFirestoreDatabase_pitrEnablement(projectId, randomSuffix, "POINT_IN_TIME_RECOVERY_DISABLED"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + }, + }) +} + +func TestAccFirestoreDatabase_updateDeleteProtectionState(t *testing.T) { + t.Parallel() + + projectId := envvar.GetTestProjectFromEnv() + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_ENABLED"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + { + Config: testAccFirestoreDatabase_deleteProtectionState(projectId, randomSuffix, "DELETE_PROTECTION_DISABLED"), + }, + { + ResourceName: "google_firestore_database.database", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"etag", "project"}, + }, + }, + }) +} + +func testAccFirestoreDatabase_concurrencyMode(projectId string, randomSuffix string, concurrencyMode string) string { + return fmt.Sprintf(` +resource "google_firestore_database" "database" { + project = "%s" + name = "tf-test-%s" + type = "DATASTORE_MODE" + location_id = "nam5" + concurrency_mode = "%s" +} +`, projectId, randomSuffix, concurrencyMode) +} + +func testAccFirestoreDatabase_pitrEnablement(projectId string, randomSuffix string, pointInTimeRecoveryEnablement string) string { + return fmt.Sprintf(` +resource "google_firestore_database" "database" { + project = "%s" + name = "tf-test-%s" + type = "DATASTORE_MODE" + location_id = "nam5" + point_in_time_recovery_enablement = "%s" +} +`, projectId, randomSuffix, pointInTimeRecoveryEnablement) +} + +func testAccFirestoreDatabase_deleteProtectionState(projectId string, randomSuffix string, deleteProtectionState string) string { + return fmt.Sprintf(` +resource "google_firestore_database" "database" { + project = "%s" + name = "tf-test-%s" + type = "DATASTORE_MODE" + location_id = "nam5" + delete_protection_state = "%s" +} +`, projectId, randomSuffix, deleteProtectionState) +} diff --git a/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_backup_plan_test.go b/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_backup_plan_test.go new file mode 100644 index 000000000000..4c2ef81cdaca --- /dev/null +++ b/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_backup_plan_test.go @@ -0,0 +1,366 @@ +package gkebackup_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccGKEBackupBackupPlan_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": 
envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "network_name": acctest.BootstrapSharedTestNetwork(t, "gke-cluster"), + "subnetwork_name": acctest.BootstrapSubnet(t, "gke-cluster", acctest.BootstrapSharedTestNetwork(t, "gke-cluster")), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEBackupBackupPlanDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEBackupBackupPlan_basic(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccGKEBackupBackupPlan_permissive(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccGKEBackupBackupPlan_full(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccGKEBackupBackupPlan_rpo_daily_window(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccGKEBackupBackupPlan_rpo_weekly_window(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccGKEBackupBackupPlan_full(context), + }, + { + ResourceName: "google_gke_backup_backup_plan.backupplan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccGKEBackupBackupPlan_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-testcluster%{random_suffix}" + location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = false + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "backupplan" { + name = "tf-test-testplan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + backup_config { + include_volume_data = false + include_secrets = false + all_namespaces = true + } + labels = { + "some-key-1": "some-value-1" + } +} +`, context) +} + +func testAccGKEBackupBackupPlan_permissive(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-testcluster%{random_suffix}" + location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = false + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "backupplan" { + name = "tf-test-testplan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + backup_config { + include_volume_data = false + include_secrets = false + all_namespaces = true + permissive_mode = true + } + labels = { + "some-key-1": "some-value-1" + } +} +`, context) +} + +func testAccGKEBackupBackupPlan_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-testcluster%{random_suffix}" + 
location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = false + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "backupplan" { + name = "tf-test-testplan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + retention_policy { + backup_delete_lock_days = 30 + backup_retain_days = 180 + } + backup_schedule { + cron_schedule = "0 9 * * 1" + } + backup_config { + include_volume_data = true + include_secrets = true + selected_applications { + namespaced_names { + name = "app1" + namespace = "ns1" + } + namespaced_names { + name = "app2" + namespace = "ns2" + } + } + } + labels = { + "some-key-2": "some-value-2" + } +} +`, context) +} + +func testAccGKEBackupBackupPlan_rpo_daily_window(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-testcluster%{random_suffix}" + location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = false + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "backupplan" { + name = "tf-test-testplan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + retention_policy { + backup_delete_lock_days = 30 + backup_retain_days = 180 + } + backup_schedule { + paused = true + rpo_config { + target_rpo_minutes=1440 + exclusion_windows { + start_time { + hours = 12 + } + duration = "7200s" + daily = true + } + exclusion_windows { + start_time { + hours = 8 + minutes = 40 + seconds = 1 + } + duration = "3600s" + single_occurrence_date { + year = 2024 + 
month = 3 + day = 16 + } + } + } + } + backup_config { + include_volume_data = true + include_secrets = true + selected_applications { + namespaced_names { + name = "app1" + namespace = "ns1" + } + namespaced_names { + name = "app2" + namespace = "ns2" + } + } + } + labels = { + "some-key-2": "some-value-2" + } +} +`, context) +} + +func testAccGKEBackupBackupPlan_rpo_weekly_window(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-testcluster%{random_suffix}" + location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = false + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "backupplan" { + name = "tf-test-testplan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + retention_policy { + backup_delete_lock_days = 30 + backup_retain_days = 180 + } + backup_schedule { + paused = true + rpo_config { + target_rpo_minutes=1400 + exclusion_windows { + start_time { + hours = 1 + minutes = 23 + } + duration = "1800s" + days_of_week { + days_of_week = ["MONDAY", "THURSDAY"] + } + } + exclusion_windows { + start_time { + hours = 12 + } + duration = "3600s" + single_occurrence_date { + year = 2024 + month = 3 + day = 17 + } + } + exclusion_windows { + start_time { + hours = 8 + minutes = 40 + } + duration = "600s" + single_occurrence_date { + year = 2024 + month = 3 + day = 18 + } + } + } + } + backup_config { + include_volume_data = true + include_secrets = true + selected_applications { + namespaced_names { + name = "app1" + namespace = "ns1" + } + namespaced_names { + name = "app2" + namespace = "ns2" + } + } + } + labels = { + "some-key-2": "some-value-2" + } +} +`, context) +} diff --git 
a/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_restore_plan_test.go b/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_restore_plan_test.go new file mode 100644 index 000000000000..cf8d9dfa0c81 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkebackup/go/resource_gke_backup_restore_plan_test.go @@ -0,0 +1,207 @@ + + +package gkebackup_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + + +func TestAccGKEBackupRestorePlan_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": envvar.GetTestProjectFromEnv(), + "deletion_protection": false, + "network_name": acctest.BootstrapSharedTestNetwork(t, "gke-cluster"), + "subnetwork_name": acctest.BootstrapSubnet(t, "gke-cluster", acctest.BootstrapSharedTestNetwork(t, "gke-cluster")), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEBackupRestorePlan_full(context), + }, + { + ResourceName: "google_gke_backup_restore_plan.restore_plan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, + }, + { + Config: testAccGKEBackupRestorePlan_update(context), + }, + { + ResourceName: "google_gke_backup_restore_plan.restore_plan", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "location", "terraform_labels"}, + }, + }, + }) +} + +func testAccGKEBackupRestorePlan_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-restore-plan%{random_suffix}-cluster" + 
location = "us-central1" + initial_node_count = 1 + workload_identity_config { + workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = "%{deletion_protection}" + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "basic" { + name = "tf-test-restore-plan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + backup_config { + include_volume_data = true + include_secrets = true + all_namespaces = true + } +} + +resource "google_gke_backup_restore_plan" "restore_plan" { + name = "tf-test-restore-plan%{random_suffix}" + location = "us-central1" + backup_plan = google_gke_backup_backup_plan.basic.id + cluster = google_container_cluster.primary.id + restore_config { + all_namespaces = true + namespaced_resource_restore_mode = "MERGE_SKIP_ON_CONFLICT" + volume_data_restore_policy = "RESTORE_VOLUME_DATA_FROM_BACKUP" + cluster_resource_restore_scope { + all_group_kinds = true + } + cluster_resource_conflict_policy = "USE_EXISTING_VERSION" + restore_order { + group_kind_dependencies { + satisfying { + resource_group = "stable.example.com" + resource_kind = "kindA" + } + requiring { + resource_group = "stable.example.com" + resource_kind = "kindB" + } + } + group_kind_dependencies { + satisfying { + resource_group = "stable.example.com" + resource_kind = "kindB" + } + requiring { + resource_group = "stable.example.com" + resource_kind = "kindC" + } + } + } + volume_data_restore_policy_bindings { + policy = "RESTORE_VOLUME_DATA_FROM_BACKUP" + volume_type = "GCE_PERSISTENT_DISK" + } + } +} +`, context) +} + +func testAccGKEBackupRestorePlan_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-restore-plan%{random_suffix}-cluster" + location = "us-central1" + initial_node_count = 1 + workload_identity_config { 
+ workload_pool = "%{project}.svc.id.goog" + } + addons_config { + gke_backup_agent_config { + enabled = true + } + } + deletion_protection = "%{deletion_protection}" + network = "%{network_name}" + subnetwork = "%{subnetwork_name}" +} + +resource "google_gke_backup_backup_plan" "basic" { + name = "tf-test-restore-plan%{random_suffix}" + cluster = google_container_cluster.primary.id + location = "us-central1" + backup_config { + include_volume_data = true + include_secrets = true + all_namespaces = true + } +} + +resource "google_gke_backup_restore_plan" "restore_plan" { + name = "tf-test-restore-plan%{random_suffix}" + location = "us-central1" + backup_plan = google_gke_backup_backup_plan.basic.id + cluster = google_container_cluster.primary.id + restore_config { + all_namespaces = true + namespaced_resource_restore_mode = "MERGE_REPLACE_VOLUME_ON_CONFLICT" + volume_data_restore_policy = "RESTORE_VOLUME_DATA_FROM_BACKUP" + cluster_resource_restore_scope { + all_group_kinds = true + } + cluster_resource_conflict_policy = "USE_EXISTING_VERSION" + restore_order { + group_kind_dependencies { + satisfying { + resource_group = "stable.example.com" + resource_kind = "kindA" + } + requiring { + resource_group = "stable.example.com" + resource_kind = "kindB" + } + } + group_kind_dependencies { + satisfying { + resource_group = "stable.example.com" + resource_kind = "kindB" + } + requiring { + resource_group = "stable.example.com" + resource_kind = "kindC" + } + } + group_kind_dependencies { + satisfying { + resource_group = "stable.example.com" + resource_kind = "kindC" + } + requiring { + resource_group = "stable.example.com" + resource_kind = "kindD" + } + } + } + volume_data_restore_policy_bindings { + policy = "REUSE_VOLUME_HANDLE_FROM_BACKUP" + volume_type = "GCE_PERSISTENT_DISK" + } + } +} +`, context) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/gkehub/go/resource_gke_hub_feature_membership_test.go.tmpl 
b/mmv1/third_party/terraform/services/gkehub/go/resource_gke_hub_feature_membership_test.go.tmpl new file mode 100644 index 000000000000..76c7415e37c7 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub/go/resource_gke_hub_feature_membership_test.go.tmpl @@ -0,0 +1,1438 @@ +package gkehub_test + +import ( + "context" + "fmt" + "strings" + "testing" + + dcl "github.com/GoogleCloudPlatform/declarative-resource-client-library/dcl" + gkehub "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/gkehub{{ $.DCLVersion }}" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccGKEHubFeatureMembership_gkehubFeatureAcmUpdate(t *testing.T) { + // Multiple fine-grained resources cause VCR to fail + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmUpdateStart(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", 
context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test2%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member_1", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmMembershipUpdate(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test2%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member_2", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmAddHierarchyController(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipNotPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test2%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test3%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member_3", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmRemoveFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipNotPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", 
"configmanagement", fmt.Sprintf("tf-test2%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipNotPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("basic1%s", context["random_suffix"])), + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test3%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member_3", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmUpdateStart(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member_1" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + git { + sync_repo = "https://github.com/GoogleCloudPlatform/magic-modules" + secret_type = "none" + } + } + } +} + +resource "google_gke_hub_feature_membership" "feature_member_2" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_second.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + git { + sync_repo = 
"https://github.com/terraform-providers/terraform-provider-google" + secret_type = "none" + } + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmMembershipUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "changed" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member_1" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + git { + sync_repo = "https://github.com/GoogleCloudPlatform/magic-modules" + secret_type = "none" + } + } + } +} + +resource "google_gke_hub_feature_membership" "feature_member_2" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_second.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + git { + sync_repo = "https://github.com/terraform-providers/terraform-provider-google-beta" + secret_type = "none" + } + } + policy_controller { + enabled = true + audit_interval_seconds = "10" + exemptable_namespaces = ["asdf", "1234"] + template_library_installed = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmAddHierarchyController(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource 
"google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "changed" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member_2" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_second.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "unstructured" + git { + sync_repo = "https://github.com/terraform-providers/terraform-provider-google-beta" + secret_type = "none" + } + } + policy_controller { + enabled = true + audit_interval_seconds = "9" + exemptable_namespaces = ["different", "1234"] + template_library_installed = false + } + hierarchy_controller { + enable_hierarchical_resource_quota = true + enable_pod_tree_labels = false + enabled = true + } + } +} + +resource "google_gke_hub_feature_membership" "feature_member_3" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_third.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + git { + sync_repo = "https://github.com/hashicorp/terraform" + secret_type = "none" + } + } + policy_controller { + enabled = false + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + } + hierarchy_controller { + enable_hierarchical_resource_quota = false + enable_pod_tree_labels = true + enabled = false + } + } +} + +resource "google_gke_hub_feature_membership" "feature_member_4" { + project = google_project.project.project_id + location = "global" + feature = 
google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_fourth.membership_id + configmanagement { + version = "1.15.1" + policy_controller { + enabled = true + audit_interval_seconds = "100" + template_library_installed = true + mutation_enabled = true + monitoring { + backends = ["CLOUD_MONITORING", "PROMETHEUS"] + } + } + } +} + + + +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmRemoveFields(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "changed" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member_3" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_third.membership_id + configmanagement { + version = "1.15.1" + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + } + } +} +`, context) +} + +func TestAccGKEHubFeatureMembership_gkehubFeatureAcmAllFields(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: 
[]resource.TestStep{ + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmFewFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmAllFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmFewFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureWithPreventDriftField(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmAllFields(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + 
acctest.Nprintf(` +resource "google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + configmanagement { + version = "1.15.1" + config_sync { + git { + sync_repo = "https://github.com/hashicorp/terraform" + https_proxy = "https://example.com" + policy_dir = "google/" + secret_type = "none" + sync_branch = "some-branch" + sync_rev = "v3.60.0" + sync_wait_secs = "30" + } + } + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + referential_rules_enabled = true + log_denies_enabled = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureWithPreventDriftField(context map[string]interface{}) string { + return 
gkeHubFeatureProjectSetup(context) + acctest.Nprintf(` +resource "google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + configmanagement { + version = "1.15.1" + config_sync { + git { + sync_repo = "https://github.com/hashicorp/terraform" + https_proxy = "https://example.com" + policy_dir = "google/" + secret_type = "none" + sync_branch = "some-branch" + sync_rev = "v3.60.0" + sync_wait_secs = "30" + } + prevent_drift = true + } + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + referential_rules_enabled = true + log_denies_enabled = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmFewFields(context 
map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + acctest.Nprintf(` +resource "google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub, google_project_service.acm] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + configmanagement { + version = "1.15.1" + config_sync { + git { + sync_repo = "https://github.com/hashicorp/terraform" + secret_type = "none" + } + } + } +} +`, context) +} + +func TestAccGKEHubFeatureMembership_gkehubFeatureAcmOci(t *testing.T) { + // Multiple fine-grained resources cause VCR to fail + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": 
envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmOciStart(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmOciUpdate(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_gkehubFeatureAcmOciRemoveFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "configmanagement", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmOciStart(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup_ACMOCI(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = 
google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_acmoci.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "unstructured" + oci { + sync_repo = "us-central1-docker.pkg.dev/sample-project/config-repo/config-sync-gke:latest" + policy_dir = "config-connector" + sync_wait_secs = "20" + secret_type = "gcpserviceaccount" + gcp_service_account_email = google_service_account.feature_sa.email + } + prevent_drift = true + } + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + referential_rules_enabled = true + log_denies_enabled = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmOciUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup_ACMOCI(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = 
google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_acmoci.membership_id + configmanagement { + version = "1.15.1" + config_sync { + source_format = "hierarchy" + oci { + sync_repo = "us-central1-docker.pkg.dev/sample-project/config-repo/config-sync-gke:latest" + policy_dir = "config-sync" + sync_wait_secs = "15" + secret_type = "gcenode" + gcp_service_account_email = google_service_account.feature_sa.email + } + prevent_drift = true + } + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + referential_rules_enabled = true + log_denies_enabled = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_gkehubFeatureAcmOciRemoveFields(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup_ACMOCI(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "configmanagement" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership_acmoci.membership_id + configmanagement { + version = "1.15.1" + policy_controller { + enabled = true + audit_interval_seconds = "100" + exemptable_namespaces = ["onetwothree", "fourfive"] + template_library_installed = true + referential_rules_enabled = true + log_denies_enabled = true + } + } +} +`, context) +} + +func TestAccGKEHubFeatureMembership_gkehubFeatureMesh(t *testing.T) { + // VCR fails to handle batched 
project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeatureMembership_meshStart(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "servicemesh", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_meshUpdateManagement(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "servicemesh", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_meshUpdateControlPlane(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "servicemesh", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeatureMembership_meshStart(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + acctest.Nprintf(` +resource 
"google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "servicemesh" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.mesh] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + mesh { + management = "MANAGEMENT_AUTOMATIC" + control_plane = "AUTOMATIC" + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_meshUpdateManagement(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + acctest.Nprintf(` +resource "google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = 
"tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "servicemesh" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.mesh] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + mesh { + management = "MANAGEMENT_MANUAL" + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_meshUpdateControlPlane(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + acctest.Nprintf(` +resource "google_container_cluster" "primary" { + project = google_project.project.project_id + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." 
+{{- end }} +} + +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "servicemesh" + location = "global" + + labels = { + foo = "bar" + } + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.mesh] +} + +resource "google_service_account" "feature_sa" { + project = google_project.project.project_id + account_id = "feature-sa" +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + mesh { + control_plane = "MANUAL" + } +} +`, context) +} + +func TestAccGKEHubFeatureMembership_gkehubFeaturePolicyController(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeatureMembership_policycontrollerStart(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "policycontroller", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_policycontrollerUpdateDefaultFields(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, 
fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "policycontroller", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeatureMembership_policycontrollerUpdateMaps(context), + Check: resource.ComposeTestCheckFunc( + testAccCheckGkeHubFeatureMembershipPresent(t, fmt.Sprintf("tf-test-gkehub%s", context["random_suffix"]), "global", "policycontroller", fmt.Sprintf("tf-test1%s", context["random_suffix"])), + ), + }, + { + ResourceName: "google_gke_hub_feature_membership.feature_member", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeatureMembership_policycontrollerStart(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "policycontroller" + location = "global" + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.poco] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_ENABLED" + exemptable_namespaces = ["foo"] + audit_interval_seconds = 30 + referential_rules_enabled = true + } + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_policycontrollerUpdateDefaultFields(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = 
"policycontroller" + location = "global" + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.poco] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature = google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_SUSPENDED" + constraint_violation_limit = 50 + referential_rules_enabled = true + log_denies_enabled = true + mutation_enabled = true + monitoring { + backends = [ + "PROMETHEUS" + ] + } + deployment_configs { + component_name = "admission" + replica_count = 3 + pod_affinity = "ANTI_AFFINITY" + container_resources { + limits { + memory = "1Gi" + cpu = "1.5" + } + requests { + memory = "500Mi" + cpu = "150m" + } + } + pod_tolerations { + key = "key1" + operator = "Equal" + value = "value1" + effect = "NoSchedule" + } + } + deployment_configs { + component_name = "mutation" + replica_count = 3 + pod_affinity = "ANTI_AFFINITY" + } + policy_content { + template_library { + installation = "ALL" + } + bundles { + bundle_name = "pci-dss-v3.2.1" + exempted_namespaces = ["sample-namespace"] + } + bundles { + bundle_name = "nist-sp-800-190" + } + } + } + version = "1.17.0" + } +} +`, context) +} + +func testAccGKEHubFeatureMembership_policycontrollerUpdateMaps(context map[string]interface{}) string { + return gkeHubFeatureProjectSetup(context) + gkeHubClusterMembershipSetup(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + project = google_project.project.project_id + name = "policycontroller" + location = "global" + depends_on = [google_project_service.container, google_project_service.gkehub, google_project_service.poco] +} + +resource "google_gke_hub_feature_membership" "feature_member" { + project = google_project.project.project_id + location = "global" + feature 
= google_gke_hub_feature.feature.name + membership = google_gke_hub_membership.membership.membership_id + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_SUSPENDED" + constraint_violation_limit = 50 + referential_rules_enabled = true + log_denies_enabled = true + mutation_enabled = true + monitoring { + backends = [ + "PROMETHEUS" + ] + } + deployment_configs { + component_name = "admission" + pod_affinity = "NO_AFFINITY" + } + deployment_configs { + component_name = "audit" + container_resources { + limits { + memory = "1Gi" + cpu = "1.5" + } + requests { + memory = "500Mi" + cpu = "150m" + } + } + } + } + version = "1.17.0" + } +} +`, context) +} + +func gkeHubClusterMembershipSetup(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_container_cluster" "secondary" { + name = "tf-test-cl2%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_container_cluster" "tertiary" { + name = "tf-test-cl3%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + + +resource "google_container_cluster" "quarternary" { + name = "tf-test-cl4%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = 
google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_membership" "membership_second" { + project = google_project.project.project_id + membership_id = "tf-test2%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.secondary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_membership" "membership_third" { + project = google_project.project.project_id + membership_id = "tf-test3%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.tertiary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." +{{- end }} +} + +resource "google_gke_hub_membership" "membership_fourth" { + project = google_project.project.project_id + membership_id = "tf-test4%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.quarternary.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." 
+{{- end }} +} +`, context) +} + +func gkeHubClusterMembershipSetup_ACMOCI(context map[string]interface{}) string { + return acctest.Nprintf(` + +resource "google_compute_network" "testnetwork" { + project = google_project.project.project_id + name = "testnetwork" + auto_create_subnetworks = true + depends_on = [google_project_service.compute] +} + +resource "google_container_cluster" "container_acmoci" { + name = "tf-test-cl%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + network = google_compute_network.testnetwork.self_link + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership_acmoci" { + project = google_project.project.project_id + membership_id = "tf-test1%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.container_acmoci.id}" + } + } +{{- if ne $.TargetVersionName "ga" }} + description = "test resource." 
+{{- end }} +} +`, context) +} + +func testAccCheckGkeHubFeatureMembershipPresent(t *testing.T, project, location, feature, membership string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + obj := &gkehub.FeatureMembership{ + Feature: dcl.StringOrNil(feature), + Location: dcl.StringOrNil(location), + Membership: dcl.StringOrNil(membership), + Project: dcl.String(project), + } + + _, err := transport_tpg.NewDCLGkeHubClient(config, "", "", 0).GetFeatureMembership(context.Background(), obj) + if err != nil { + return err + } + return nil + } +} + +func testAccCheckGkeHubFeatureMembershipNotPresent(t *testing.T, project, location, feature, membership string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + obj := &gkehub.FeatureMembership{ + Feature: dcl.StringOrNil(feature), + Location: dcl.StringOrNil(location), + Membership: dcl.StringOrNil(membership), + Project: dcl.String(project), + } + + _, err := transport_tpg.NewDCLGkeHubClient(config, "", "", 0).GetFeatureMembership(context.Background(), obj) + if err == nil { + return fmt.Errorf("Did not expect to find GKE Feature Membership for projects/%s/locations/%s/features/%s/membershipId/%s", project, location, feature, membership) + } + if dcl.IsNotFound(err) { + return nil + } + return err + } +} + +// Copy this function from the package gkehub2_test to here +func gkeHubFeatureProjectSetup(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "tf-test-gkehub%{random_suffix}" + project_id = "tf-test-gkehub%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "anthos" { + project = google_project.project.project_id + service = "anthos.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "mesh" { + project = 
google_project.project.project_id + service = "meshconfig.googleapis.com" +} + +resource "google_project_service" "mci" { + project = google_project.project.project_id + service = "multiclusteringress.googleapis.com" +} + +resource "google_project_service" "acm" { + project = google_project.project.project_id + service = "anthosconfigmanagement.googleapis.com" +} + +resource "google_project_service" "poco" { + project = google_project.project.project_id + service = "anthospolicycontroller.googleapis.com" +} + +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "container" { + project = google_project.project.project_id + service = "container.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +`, context) +} + +// Copy this function from the package gkehub2_test to here +func testAccCheckGKEHubFeatureDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gke_hub_feature" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + 
Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GKEHubFeature still exists at %s", url) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/services/gkehub2/go/iam_gke_hub_feature_test.go b/mmv1/third_party/terraform/services/gkehub2/go/iam_gke_hub_feature_test.go new file mode 100644 index 000000000000..2c8c61f8b190 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub2/go/iam_gke_hub_feature_test.go @@ -0,0 +1,316 @@ +package gkehub2_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccGKEHub2FeatureIamBindingGenerated(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + "project_id": fmt.Sprintf("tf-test-gkehub-%s", acctest.RandString(t, 10)), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHub2FeatureIamBinding_basicGenerated(context), + }, + { + ResourceName: "google_gke_hub_feature_iam_binding.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/features/%s roles/viewer", context["project_id"], "global", fmt.Sprint("multiclusterservicediscovery")), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccGKEHub2FeatureIamBinding_updateGenerated(context), + }, + { + ResourceName: "google_gke_hub_feature_iam_binding.foo", + ImportStateId: 
fmt.Sprintf("projects/%s/locations/%s/features/%s roles/viewer", context["project_id"], "global", fmt.Sprint("multiclusterservicediscovery")), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccGKEHub2FeatureIamMemberGenerated(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + "project_id": fmt.Sprintf("tf-test-gkehub-%s", acctest.RandString(t, 10)), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccGKEHub2FeatureIamMember_basicGenerated(context), + }, + { + ResourceName: "google_gke_hub_feature_iam_member.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/features/%s roles/viewer user:admin@hashicorptest.com", context["project_id"], "global", fmt.Sprint("multiclusterservicediscovery")), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccGKEHub2FeatureIamPolicyGenerated(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + "project_id": fmt.Sprintf("tf-test-gkehub-%s", acctest.RandString(t, 10)), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: 
testAccGKEHub2FeatureIamPolicy_basicGenerated(context), + Check: resource.TestCheckResourceAttrSet("data.google_gke_hub_feature_iam_policy.foo", "policy_data"), + }, + { + ResourceName: "google_gke_hub_feature_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/features/%s", context["project_id"], "global", fmt.Sprint("multiclusterservicediscovery")), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHub2FeatureIamPolicy_emptyBinding(context), + }, + { + ResourceName: "google_gke_hub_feature_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/features/%s", context["project_id"], "global", fmt.Sprint("multiclusterservicediscovery")), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHub2FeatureIamMember_basicGenerated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "%{project_id}" + project_id = "%{project_id}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd, google_project_service.gkehub] +} +resource "google_gke_hub_feature_iam_member" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + role = "%{role}" + member = "user:admin@hashicorptest.com" +} +`, context) +} + +func testAccGKEHub2FeatureIamPolicy_basicGenerated(context 
map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "%{project_id}" + project_id = "%{project_id}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd, google_project_service.gkehub] +} +data "google_iam_policy" "foo" { + binding { + role = "%{role}" + members = ["user:admin@hashicorptest.com"] + } +} +resource "google_gke_hub_feature_iam_policy" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + policy_data = data.google_iam_policy.foo.policy_data +} +data "google_gke_hub_feature_iam_policy" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + depends_on = [ + google_gke_hub_feature_iam_policy.foo + ] +} +`, context) +} + +func testAccGKEHub2FeatureIamPolicy_emptyBinding(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "%{project_id}" + project_id = "%{project_id}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = 
"gkehub.googleapis.com" + disable_on_destroy = false +} +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd, google_project_service.gkehub] +} +data "google_iam_policy" "foo" { +} +resource "google_gke_hub_feature_iam_policy" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + policy_data = data.google_iam_policy.foo.policy_data +} +`, context) +} + +func testAccGKEHub2FeatureIamBinding_basicGenerated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "%{project_id}" + project_id = "%{project_id}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd, google_project_service.gkehub] +} +resource "google_gke_hub_feature_iam_binding" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + role = "%{role}" + members = ["user:admin@hashicorptest.com"] +} +`, context) +} + +func testAccGKEHub2FeatureIamBinding_updateGenerated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "%{project_id}" + project_id = 
"%{project_id}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd, google_project_service.gkehub] +} +resource "google_gke_hub_feature_iam_binding" "foo" { + project = google_gke_hub_feature.feature.project + location = google_gke_hub_feature.feature.location + name = google_gke_hub_feature.feature.name + role = "%{role}" + members = ["user:admin@hashicorptest.com", "user:gterraformtest1@gmail.com"] +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_feature_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_feature_test.go.tmpl new file mode 100644 index 000000000000..6ade6964b38e --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_feature_test.go.tmpl @@ -0,0 +1,1017 @@ +package gkehub2_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccGKEHubFeature_gkehubFeatureFleetObservability(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context 
:= map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_gkehubFeatureFleetObservability(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeature_gkehubFeatureFleetObservabilityUpdate1(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeature_gkehubFeatureFleetObservabilityUpdate2(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeature_gkehubFeatureFleetObservability(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "time_sleep" "wait_for_gkehub_enablement" { + create_duration = "150s" + depends_on = [google_project_service.gkehub] +} + +resource "google_gke_hub_feature" "feature" { + name = "fleetobservability" + location = "global" + project = google_project.project.project_id + spec { + fleetobservability { + logging_config { + default_config { + mode = "MOVE" + } + fleet_scope_logs_config { + mode = "COPY" + } + } + } + } + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +func testAccGKEHubFeature_gkehubFeatureFleetObservabilityUpdate1(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "time_sleep" 
"wait_for_gkehub_enablement" { + create_duration = "150s" + depends_on = [google_project_service.gkehub] +} + +resource "google_gke_hub_feature" "feature" { + name = "fleetobservability" + location = "global" + project = google_project.project.project_id + spec { + fleetobservability { + logging_config { + default_config { + mode = "MOVE" + } + } + } + } + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +func testAccGKEHubFeature_gkehubFeatureFleetObservabilityUpdate2(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "time_sleep" "wait_for_gkehub_enablement" { + create_duration = "150s" + depends_on = [google_project_service.gkehub] +} + +resource "google_gke_hub_feature" "feature" { + name = "fleetobservability" + location = "global" + project = google_project.project.project_id + spec { + fleetobservability { + logging_config { + fleet_scope_logs_config { + mode = "COPY" + } + } + } + } + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +{{ if ne $.TargetVersionName `ga` -}} +func gkeHubFeatureProjectSetup(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "tf-test-gkehub%{random_suffix}" + project_id = "tf-test-gkehub%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" + provider = google-beta +} + +resource "google_project_service" "mesh" { + project = google_project.project.project_id + service = "meshconfig.googleapis.com" + provider = google-beta +} + +resource "google_project_service" "mci" { + project = google_project.project.project_id + service = "multiclusteringress.googleapis.com" + provider = google-beta +} + +resource "google_project_service" "acm" { + project = google_project.project.project_id + service = "anthosconfigmanagement.googleapis.com" + provider = google-beta +} + +resource "google_project_service" "mcsd" { + project = 
google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" + provider = google-beta +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_on_destroy = false + provider = google-beta +} + +resource "google_project_service" "container" { + project = google_project.project.project_id + service = "container.googleapis.com" + disable_on_destroy = false + provider = google-beta +} + +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false + provider = google-beta +} +`, context) +} +{{- end }} + +func TestAccGKEHubFeature_gkehubFeatureMciUpdate(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_gkehubFeatureMciUpdateStart(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"update_time"}, + }, + { + Config: testAccGKEHubFeature_gkehubFeatureMciChangeMembership(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"update_time", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccGKEHubFeature_gkehubFeatureMciUpdateStart(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + 
acctest.Nprintf(` + +resource "google_container_cluster" "primary" { + name = "tf-test%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_container_cluster" "secondary" { + name = "tf-test2%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + membership_id = "tf-test%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } + project = google_project.project.project_id +} + +resource "google_gke_hub_membership" "membership_second" { + membership_id = "tf-test2%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.secondary.id}" + } + } + project = google_project.project.project_id +} + +resource "google_gke_hub_feature" "feature" { + name = "multiclusteringress" + location = "global" + spec { + multiclusteringress { + config_membership = google_gke_hub_membership.membership.id + } + } + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_gkehubFeatureMciChangeMembership(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_container_cluster" "primary" { + name = "tf-test%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.mci, 
google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_container_cluster" "secondary" { + name = "tf-test2%{random_suffix}" + location = "us-central1-a" + initial_node_count = 1 + project = google_project.project.project_id + deletion_protection = false + depends_on = [google_project_service.mci, google_project_service.container, google_project_service.container, google_project_service.gkehub] +} + +resource "google_gke_hub_membership" "membership" { + membership_id = "tf-test%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.primary.id}" + } + } + project = google_project.project.project_id +} + +resource "google_gke_hub_membership" "membership_second" { + membership_id = "tf-test2%{random_suffix}" + endpoint { + gke_cluster { + resource_link = "//container.googleapis.com/${google_container_cluster.secondary.id}" + } + } + project = google_project.project.project_id +} + +resource "google_gke_hub_feature" "feature" { + name = "multiclusteringress" + location = "global" + spec { + multiclusteringress { + config_membership = google_gke_hub_membership.membership_second.id + } + } + labels = { + foo = "bar" + } + project = google_project.project.project_id +} +`, context) +} + +func TestAccGKEHubFeature_FleetDefaultMemberConfigServiceMesh(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccGKEHubFeature_FleetDefaultMemberConfigServiceMesh(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project"}, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshRemovalUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshReAddUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigServiceMesh(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "servicemesh" + location = "global" + fleet_default_member_config { + mesh { + management = "MANAGEMENT_AUTOMATIC" + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.mesh] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "servicemesh" + location = "global" + fleet_default_member_config { + mesh { + management = "MANAGEMENT_MANUAL" + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.mesh] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshRemovalUpdate(context 
map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "servicemesh" + location = "global" + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.mesh] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigServiceMeshReAddUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "servicemesh" + location = "global" + fleet_default_member_config { + mesh { + management = "MANAGEMENT_MANUAL" + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.mesh] + project = google_project.project.project_id +} +`, context) +} + +func TestAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project"}, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagement(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "configmanagement" + location = "global" + fleet_default_member_config { + configmanagement { + version = "1.16.0" + config_sync { + source_format = "hierarchy" + git { + sync_repo = "https://github.com/GoogleCloudPlatform/magic-modules" + sync_branch = "master" + policy_dir = "." + sync_rev = "HEAD" + secret_type = "none" + sync_wait_secs = "15" + } + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigConfigManagementUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "configmanagement" + location = "global" + fleet_default_member_config { + configmanagement { + version = "1.16.1" + config_sync { + prevent_drift = true + source_format = "unstructured" + oci { + sync_repo = "us-central1-docker.pkg.dev/corp-gke-build-artifacts/acm/configs:latest" + policy_dir = "/acm/nonprod-root/" + secret_type = "gcpserviceaccount" + sync_wait_secs = "15" + gcp_service_account_email = "gke-cluster@gke-foo-nonprod.iam.gserviceaccount.com" + } + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.acm] + project = google_project.project.project_id +} +`, context) +} + +func TestAccGKEHubFeature_Clusterupgrade(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + 
acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_Clusterupgrade(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project", "update_time"}, + }, + { + Config: testAccGKEHubFeature_ClusterupgradeUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"update_time"}, + }, + }, + }) +} + +func testAccGKEHubFeature_Clusterupgrade(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "clusterupgrade" + location = "global" + spec { + clusterupgrade { + upstream_fleets = [] + post_conditions { + soaking = "60s" + } + } + } + depends_on = [google_project_service.gkehub] + project = google_project.project.project_id +} + +resource "google_gke_hub_feature" "feature_2" { + name = "clusterupgrade" + location = "global" + spec { + clusterupgrade { + upstream_fleets = [] + post_conditions { + soaking = "60s" + } + } + } + depends_on = [google_project_service.gkehub_2] + project = google_project.project_2.project_id +} +`, context) +} + +func testAccGKEHubFeature_ClusterupgradeUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "clusterupgrade" + location = "global" + spec { + clusterupgrade { + upstream_fleets = [google_project.project_2.number] + post_conditions { + soaking = "120s" + } + gke_upgrade_overrides { + upgrade { + name = "k8s_control_plane" + version = "1.22.1-gke.100" + } + post_conditions { + soaking = 
"240s" + } + } + } + } + project = google_project.project.project_id +} + +resource "google_gke_hub_feature" "feature_2" { + name = "clusterupgrade" + location = "global" + spec { + clusterupgrade { + upstream_fleets = [] + post_conditions { + soaking = "60s" + } + } + } + depends_on = [google_project_service.gkehub_2] + project = google_project.project_2.project_id +} +`, context) +} + +func TestAccGKEHubFeature_FleetDefaultMemberConfigPolicyController(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigPolicyController(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project", "update_time"}, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigPolicyControllerFull(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHubFeature_FleetDefaultMemberConfigPolicyControllerMinimal(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigPolicyController(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "policycontroller" + location = "global" + fleet_default_member_config { + 
policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_ENABLED" + exemptable_namespaces = ["foo"] + policy_content { + bundles { + bundle = "policy-essentials-v2022" + exempted_namespaces = ["foo", "bar"] + } + } + audit_interval_seconds = 30 + referential_rules_enabled = true + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.poco] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigPolicyControllerFull(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "policycontroller" + location = "global" + fleet_default_member_config { + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_SUSPENDED" + policy_content { + bundles { + bundle = "pci-dss-v3.2.1" + exempted_namespaces = ["baz", "bar"] + } + bundles { + bundle = "nist-sp-800-190" + exempted_namespaces = [] + } + template_library { + installation = "ALL" + } + } + constraint_violation_limit = 50 + referential_rules_enabled = true + log_denies_enabled = true + mutation_enabled = true + deployment_configs { + component = "admission" + replica_count = 2 + pod_affinity = "ANTI_AFFINITY" + } + deployment_configs { + component = "audit" + container_resources { + limits { + memory = "1Gi" + cpu = "1.5" + } + requests { + memory = "500Mi" + cpu = "150m" + } + } + pod_toleration { + key = "key1" + operator = "Equal" + value = "value1" + effect = "NoSchedule" + } + } + monitoring { + backends = [ + "PROMETHEUS" + ] + } + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.poco] + project = google_project.project.project_id +} +`, context) +} + +func testAccGKEHubFeature_FleetDefaultMemberConfigPolicyControllerMinimal(context map[string]interface{}) string { + return 
gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "policycontroller" + location = "global" + fleet_default_member_config { + policycontroller { + policy_controller_hub_config { + install_spec = "INSTALL_SPEC_ENABLED" + policy_content {} + constraint_violation_limit = 50 + referential_rules_enabled = true + log_denies_enabled = true + mutation_enabled = true + deployment_configs { + component = "admission" + } + monitoring {} + } + } + } + depends_on = [google_project_service.anthos, google_project_service.gkehub, google_project_service.poco] + project = google_project.project.project_id +} +`, context) +} + +func TestAccGKEHubFeature_gkehubFeatureMcsd(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHubFeatureDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGKEHubFeature_gkehubFeatureMcsd(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"project", "labels", "terraform_labels"}, + }, + { + Config: testAccGKEHubFeature_gkehubFeatureMcsdUpdate(context), + }, + { + ResourceName: "google_gke_hub_feature.feature", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccGKEHubFeature_gkehubFeatureMcsd(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = 
"multiclusterservicediscovery" + location = "global" + project = "projects/${google_project.project.project_id}" + labels = { + foo = "bar" + } + depends_on = [google_project_service.mcsd] +} +`, context) +} + +func testAccGKEHubFeature_gkehubFeatureMcsdUpdate(context map[string]interface{}) string { + return gkeHubFeatureProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_feature" "feature" { + name = "multiclusterservicediscovery" + location = "global" + project = google_project.project.project_id + labels = { + foo = "quux" + baz = "qux" + } + depends_on = [google_project_service.mcsd] +} +`, context) +} + +func gkeHubFeatureProjectSetupForGA(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "tf-test-gkehub%{random_suffix}" + project_id = "tf-test-gkehub%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "mesh" { + project = google_project.project.project_id + service = "meshconfig.googleapis.com" +} + +resource "google_project_service" "mci" { + project = google_project.project.project_id + service = "multiclusteringress.googleapis.com" +} + +resource "google_project_service" "acm" { + project = google_project.project.project_id + service = "anthosconfigmanagement.googleapis.com" +} + +resource "google_project_service" "poco" { + project = google_project.project.project_id + service = "anthospolicycontroller.googleapis.com" +} + +resource "google_project_service" "mcsd" { + project = google_project.project.project_id + service = "multiclusterservicediscovery.googleapis.com" +} + +resource "google_project_service" "compute" { + project = google_project.project.project_id + service = "compute.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "container" { + project = google_project.project.project_id + service = "container.googleapis.com" + disable_on_destroy = false +} + 
+resource "google_project_service" "anthos" { + project = google_project.project.project_id + service = "anthos.googleapis.com" +} + +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} + +resource "google_project" "project_2" { + name = "tf-test-gkehub%{random_suffix}-2" + project_id = "tf-test-gkehub%{random_suffix}-2" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "compute_2" { + project = google_project.project_2.project_id + service = "compute.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "container_2" { + project = google_project.project_2.project_id + service = "container.googleapis.com" + disable_on_destroy = false +} + +resource "google_project_service" "gkehub_2" { + project = google_project.project_2.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false +} +`, context) +} + +func testAccCheckGKEHubFeatureDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gke_hub_feature" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/features/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GKEHubFeature still exists at %s", url) + } + } + + return nil + } +} diff 
--git a/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_fleet_test.go.tmpl b/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_fleet_test.go.tmpl new file mode 100644 index 000000000000..f3b872f24615 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkehub2/go/resource_gke_hub_fleet_test.go.tmpl @@ -0,0 +1,180 @@ +package gkehub2_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccGKEHub2Fleet_gkehubFleetBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGKEHub2FleetDestroyProducer(t), + ExternalProviders: map[string]resource.ExternalProvider{ + "time": {}, + }, + Steps: []resource.TestStep{ + { + Config: testAccGKEHub2Fleet_basic(context), + }, + { + ResourceName: "google_gke_hub_fleet.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHub2Fleet_update(context), + }, + { + ResourceName: "google_gke_hub_fleet.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGKEHub2Fleet_removedDefaultClusterConfig(context), + }, + { + ResourceName: "google_gke_hub_fleet.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGKEHub2Fleet_basic(context 
map[string]interface{}) string { + return gkeHubFleetProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_fleet" "default" { + project = google_project.project.project_id + display_name = "my production fleet" + default_cluster_config { + binary_authorization_config { + evaluation_mode = "DISABLED" + } + security_posture_config { + mode = "DISABLED" + vulnerability_mode = "VULNERABILITY_DISABLED" + } + } + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +func testAccGKEHub2Fleet_update(context map[string]interface{}) string { + return gkeHubFleetProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_fleet" "default" { + project = google_project.project.project_id + display_name = "my updated fleet" + default_cluster_config { + binary_authorization_config { + evaluation_mode = "POLICY_BINDINGS" + policy_bindings { + name = "projects/${google_project.project.project_id}/platforms/gke/policies/policy_id" + } + } + security_posture_config { + mode = "BASIC" + vulnerability_mode = "VULNERABILITY_BASIC" + } + } + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +func testAccGKEHub2Fleet_removedDefaultClusterConfig(context map[string]interface{}) string { + return gkeHubFleetProjectSetupForGA(context) + acctest.Nprintf(` +resource "google_gke_hub_fleet" "default" { + project = google_project.project.project_id + display_name = "my updated fleet" + + depends_on = [time_sleep.wait_for_gkehub_enablement] +} +`, context) +} + +func gkeHubFleetProjectSetupForGA(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + name = "tf-test-gkehub%{random_suffix}" + project_id = "tf-test-gkehub%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "gkehub" { + project = google_project.project.project_id + service = "gkehub.googleapis.com" + disable_on_destroy = false + depends_on 
= [google_project_service.anthos] +} + +resource "google_project_service" "anthos" { + project = google_project.project.project_id + service = "anthos.googleapis.com" + disable_on_destroy = false +} + +resource "time_sleep" "wait_for_gkehub_enablement" { + create_duration = "150s" + depends_on = [google_project_service.gkehub] +} +`, context) +} + +func testAccCheckGKEHub2FleetDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_gke_hub_fleet" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}GKEHub2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/global/fleets/default") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("GKEHub2Fleet still exists at %s", url) + } + } + + return nil + } +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/gkeonprem/go/gkeonprem_operation.go b/mmv1/third_party/terraform/services/gkeonprem/go/gkeonprem_operation.go new file mode 100644 index 000000000000..255f43439b96 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/go/gkeonprem_operation.go @@ -0,0 +1,145 @@ +package gkeonprem + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + cloudresourcemanager "google.golang.org/api/cloudresourcemanager/v1" +) + +type gkeonpremOpError struct { + *cloudresourcemanager.Status 
+} + +func (e gkeonpremOpError) Error() string { + var validationCheck map[string]interface{} + + for _, msg := range e.Details { + detail := make(map[string]interface{}) + if err := json.Unmarshal(msg, &detail); err != nil { + continue + } + + if _, ok := detail["validationCheck"]; ok { + delete(detail, "@type") + validationCheck = detail + } + } + + if validationCheck != nil { + bytes, err := json.MarshalIndent(validationCheck, "", " ") + if err != nil { + return fmt.Sprintf("Error code %v message: %s validation check: %s", e.Code, e.Message, validationCheck) + } + + return fmt.Sprintf("Error code %v message: %s\n %s", e.Code, e.Message, bytes) + } + + return fmt.Sprintf("Error code %v, message: %s", e.Code, e.Message) +} + +type gkeonpremOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + Op tpgresource.CommonOperation +} + +func (w *gkeonpremOperationWaiter) State() string { + if w == nil { + return fmt.Sprintf("Operation is nil!") + } + + return fmt.Sprintf("done: %v", w.Op.Done) +} + +func (w *gkeonpremOperationWaiter) Error() error { + if w != nil && w.Op.Error != nil { + return &gkeonpremOpError{w.Op.Error} + } + return nil +} + +func (w *gkeonpremOperationWaiter) IsRetryable(error) bool { + return false +} + +func (w *gkeonpremOperationWaiter) SetOp(op interface{}) error { + if err := tpgresource.Convert(op, &w.Op); err != nil { + return err + } + return nil +} + +func (w *gkeonpremOperationWaiter) OpName() string { + if w == nil { + return "" + } + + return w.Op.Name +} + +func (w *gkeonpremOperationWaiter) PendingStates() []string { + return []string{"done: false"} +} + +func (w *gkeonpremOperationWaiter) TargetStates() []string { + return []string{"done: true"} +} + +func (w *gkeonpremOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + // Returns the proper get. 
+ url := fmt.Sprintf("%s%s", w.Config.GkeonpremBasePath, w.Op.Name) + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func creategkeonpremWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*gkeonpremOperationWaiter, error) { + w := &gkeonpremOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func GkeonpremOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.Op.Response), response) +} + +func GkeonpremOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := creategkeonpremWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. 
+ return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_cluster_test.go b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_cluster_test.go new file mode 100644 index 000000000000..caa9138f058b --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_cluster_test.go @@ -0,0 +1,575 @@ +package gkeonprem_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBasic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-metallb", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) +} + +func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-manuallb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_cluster.cluster-bgplb", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { + name = "cluster-metallb%{random_suffix}" + location = "us-west1" + annotations = { + env = "test" + } + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { 
+ service_address_cidr_blocks = ["172.26.0.0/16"] + pod_address_cidr_blocks = ["10.240.0.0/13"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.9" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 443 + } + vip_config { + control_plane_vip = "10.200.0.13" + ingress_vip = "10.200.0.14" + } + metal_lb_config { + address_pools { + pool = "pool1" + addresses = [ + "10.200.0.14/32", + "10.200.0.15/32", + "10.200.0.16/32", + "10.200.0.17/32", + "10.200.0.18/32", + "fd00:1::f/128", + "fd00:1::10/128", + "fd00:1::11/128", + "fd00:1::12/128" + ] + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share" + storage_class = "local-shared" + } + shared_path_pv_count = 5 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk" + storage_class = "local-disks" + } + } + security_config { + authorization { + admin_users { + username = "admin@hashicorptest.com" + } + } + } + } +`, context) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateMetalLb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster-metallb" { + name = "cluster-metallb%{random_suffix}" + location = "us-west1" + annotations = { + env = "test-update" + } + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/20"] + pod_address_cidr_blocks = ["10.240.0.0/14"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.10" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 80 + } + vip_config { + control_plane_vip = 
"10.200.0.14" + ingress_vip = "10.200.0.15" + } + metal_lb_config { + address_pools { + pool = "pool2" + addresses = [ + "10.200.0.14/32", + "10.200.0.15/32", + "10.200.0.16/32", + "10.200.0.17/32", + "fd00:1::f/128", + "fd00:1::10/128", + "fd00:1::11/128" + ] + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share-updated" + storage_class = "local-shared-updated" + } + shared_path_pv_count = 6 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk-updated" + storage_class = "local-disks-updated" + } + } + security_config { + authorization { + admin_users { + username = "admin-updated@hashicorptest.com" + } + } + } + } +`, context) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { + name = "cluster-manuallb%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/20"] + pod_address_cidr_blocks = ["10.240.0.0/14"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.10" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 80 + } + vip_config { + control_plane_vip = "10.200.0.13" + ingress_vip = "10.200.0.14" + } + metal_lb_config { + address_pools { + pool = "pool2" + addresses = [ + "10.200.0.14/32", + "10.200.0.15/32", + "10.200.0.16/32", + "10.200.0.17/32", + "fd00:1::f/128", + "fd00:1::10/128", + "fd00:1::11/128" + ] + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share" + storage_class = "local-shared" + } + shared_path_pv_count = 6 + } + lvp_node_mounts_config { 
+ path = "/mnt/localpv-disk" + storage_class = "local-disks" + } + } + security_config { + authorization { + admin_users { + username = "admin@hashicorptest.com" + } + } + } + binary_authorization { + evaluation_mode = "DISABLED" + } + upgrade_policy { + policy = "SERIAL" + } + } +`, context) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateManualLb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster-manuallb" { + name = "cluster-manuallb%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/20"] + pod_address_cidr_blocks = ["10.240.0.0/14"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.10" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 80 + } + vip_config { + control_plane_vip = "10.200.0.14" + ingress_vip = "10.200.0.15" + } + manual_lb_config { + enabled = true + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share-updated" + storage_class = "local-shared-updated" + } + shared_path_pv_count = 6 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk-updated" + storage_class = "local-disks-updated" + } + } + security_config { + authorization { + admin_users { + username = "admin-updated@hashicorptest.com" + } + } + } + binary_authorization { + evaluation_mode = "PROJECT_SINGLETON_POLICY_ENFORCE" + } + upgrade_policy { + policy = "CONCURRENT" + } + } +`, context) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" 
"cluster-bgplb" { + name = "cluster-bgplb%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/20"] + pod_address_cidr_blocks = ["10.240.0.0/14"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.10" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 80 + } + vip_config { + control_plane_vip = "10.200.0.13" + ingress_vip = "10.200.0.14" + } + bgp_lb_config { + asn = 123456 + bgp_peer_configs { + asn = 123457 + ip_address = "10.0.0.1" + control_plane_nodes = ["test-node"] + } + address_pools { + pool = "pool1" + addresses = [ + "10.200.0.14/32", + "fd00:1::12/128" + ] + } + load_balancer_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.9" + } + } + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share" + storage_class = "local-shared" + } + shared_path_pv_count = 6 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk" + storage_class = "local-disks" + } + } + security_config { + authorization { + admin_users { + username = "admin@hashicorptest.com" + } + } + } + } +`, context) +} + +func testAccGkeonpremBareMetalCluster_bareMetalClusterUpdateBgpLb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster-bgplb" { + name = "cluster-bgplb%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/20"] + 
pod_address_cidr_blocks = ["10.240.0.0/14"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.10" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 80 + } + vip_config { + control_plane_vip = "10.200.0.14" + ingress_vip = "10.200.0.15" + } + bgp_lb_config { + asn = 123457 + bgp_peer_configs { + asn = 123458 + ip_address = "10.0.0.2" + control_plane_nodes = ["test-node-updated"] + } + address_pools { + pool = "pool2" + addresses = [ + "10.200.0.15/32", + "fd00:1::16/128" + ] + } + load_balancer_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.11" + } + } + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share-updated" + storage_class = "local-shared-updated" + } + shared_path_pv_count = 6 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk-updated" + storage_class = "local-disks-updated" + } + } + security_config { + authorization { + admin_users { + username = "admin-updated@hashicorptest.com" + } + } + } + } +`, context) +} diff --git a/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_node_pool_test.go b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_node_pool_test.go new file mode 100644 index 000000000000..67959eb8364f --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_bare_metal_node_pool_test.go @@ -0,0 +1,226 @@ +package gkeonprem_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } 
+ + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremBareMetalNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_bare_metal_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) +} + +func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdateStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/16"] + pod_address_cidr_blocks = ["10.240.0.0/13"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.9" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 443 + } + vip_config { + control_plane_vip = "10.200.0.13" + ingress_vip = "10.200.0.14" + } + metal_lb_config { + address_pools { + pool = "pool1" + addresses = [ + "10.200.0.14/32", + "10.200.0.15/32", + "10.200.0.16/32", + "10.200.0.17/32", + "10.200.0.18/32", + "fd00:1::f/128", + "fd00:1::10/128", + "fd00:1::11/128", + "fd00:1::12/128" + ] + } + } + } + storage { + 
lvp_share_config { + lvp_config { + path = "/mnt/localpv-share" + storage_class = "local-shared" + } + shared_path_pv_count = 5 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk" + storage_class = "local-disks" + } + } + security_config { + authorization { + admin_users { + username = "admin@hashicorptest.com" + } + } + } + } + + resource "google_gkeonprem_bare_metal_node_pool" "nodepool" { + name = "tf-test-nodepool-%{random_suffix}" + location = "us-west1" + bare_metal_cluster = google_gkeonprem_bare_metal_cluster.cluster.name + annotations = { + env = "test" + } + node_pool_config { + operating_system = "LINUX" + labels = {} + node_configs { + node_ip = "10.200.0.11" + labels = {} + } + } + } +`, context) +} + +func testAccGkeonpremBareMetalNodePool_bareMetalNodePoolUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_bare_metal_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + bare_metal_version = "1.12.3" + network_config { + island_mode_cidr { + service_address_cidr_blocks = ["172.26.0.0/16"] + pod_address_cidr_blocks = ["10.240.0.0/13"] + } + } + control_plane { + control_plane_node_pool_config { + node_pool_config { + labels = {} + operating_system = "LINUX" + node_configs { + labels = {} + node_ip = "10.200.0.9" + } + } + } + } + load_balancer { + port_config { + control_plane_load_balancer_port = 443 + } + vip_config { + control_plane_vip = "10.200.0.13" + ingress_vip = "10.200.0.14" + } + metal_lb_config { + address_pools { + pool = "pool1" + addresses = [ + "10.200.0.14/32", + "10.200.0.15/32", + "10.200.0.16/32", + "10.200.0.17/32", + "10.200.0.18/32", + "fd00:1::f/128", + "fd00:1::10/128", + "fd00:1::11/128", + "fd00:1::12/128" + ] + } + } + } + storage { + lvp_share_config { + lvp_config { + path = "/mnt/localpv-share" + storage_class = 
"local-shared" + } + shared_path_pv_count = 5 + } + lvp_node_mounts_config { + path = "/mnt/localpv-disk" + storage_class = "local-disks" + } + } + security_config { + authorization { + admin_users { + username = "admin@hashicorptest.com" + } + } + } + } + + resource "google_gkeonprem_bare_metal_node_pool" "nodepool" { + name = "tf-test-nodepool-%{random_suffix}" + location = "us-west1" + bare_metal_cluster = google_gkeonprem_bare_metal_cluster.cluster.name + annotations = { + env = "test-update" + } + node_pool_config { + operating_system = "LINUX" + labels = {} + node_configs { + node_ip = "10.200.0.12" + labels = {} + } + } + } +`, context) +} diff --git a/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_cluster_test.go b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_cluster_test.go new file mode 100644 index 000000000000..0f303ba04155 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_cluster_test.go @@ -0,0 +1,484 @@ +package gkeonprem_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateBasic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context), 
+ }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) +} + +func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateF5Lb(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(t *testing.T) { + // VCR fails to handle batched project services + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context), + }, + { + ResourceName: "google_gkeonprem_vmware_cluster.cluster", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func 
testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = { + env = "test" + } + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + dhcp_ip_config { + enabled = true + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + metal_lb_config { + address_pools { + pool = "ingress-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + address_pools { + pool = "lb-test-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + } + } + } +`, context) +} + +func testAccGkeonpremVmwareCluster_vmwareClusterUpdateMetalLb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster updated" + on_prem_version = "1.13.1-gke.36" + annotations = { + env = "test-update" + } + network_config { + service_address_cidr_blocks = ["10.96.0.0/16"] + pod_address_cidr_blocks = ["192.168.0.0/20"] + dhcp_ip_config { + enabled = true + } + } + control_plane_node { + cpus = 5 + memory = 4098 + replicas = 3 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.6" + ingress_vip = "10.251.135.20" + } + metal_lb_config { + address_pools { + pool = "ingress-ip-updated" + 
manual_assign = "false" + addresses = ["10.251.135.20"] + avoid_buggy_ips = false + } + address_pools { + pool = "lb-test-ip-updated" + manual_assign = "false" + addresses = ["10.251.135.20"] + avoid_buggy_ips = false + } + } + } + } +`, context) +} + +func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5LbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + dhcp_ip_config { + enabled = true + } + control_plane_v2_config { + control_plane_ip_block { + ips { + hostname = "test-hostname" + ip = "10.0.0.1" + } + netmask="10.0.0.1/32" + gateway="test-gateway" + } + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + f5_config { + address = "10.0.0.1" + partition = "test-partition" + snat_pool = "test-snap-pool" + } + } + } +`, context) +} + +func testAccGkeonpremVmwareCluster_vmwareClusterUpdateF5lb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + dhcp_ip_config { + enabled = true + } + control_plane_v2_config { + control_plane_ip_block { + 
ips { + hostname = "test-hostname-updated" + ip = "10.0.0.2" + } + netmask="10.0.0.2/32" + gateway="test-gateway-updated" + } + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + f5_config { + address = "10.0.0.2" + partition = "test-partition-updated" + snat_pool = "test-snap-pool-updated" + } + } + } +`, context) +} + +func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLbStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + host_config { + dns_servers = ["10.254.41.1"] + ntp_servers = ["216.239.35.8"] + dns_search_domains = ["test-domain"] + } + static_ip_config { + ip_blocks { + netmask = "255.255.252.0" + gateway = "10.251.31.254" + ips { + ip = "10.251.30.153" + hostname = "test-hostname1" + } + ips { + ip = "10.251.31.206" + hostname = "test-hostname2" + } + ips { + ip = "10.251.31.193" + hostname = "test-hostname3" + } + ips { + ip = "10.251.30.230" + hostname = "test-hostname4" + } + } + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + manual_lb_config { + ingress_http_node_port = 30005 + ingress_https_node_port = 30006 + control_plane_node_port = 30007 + konnectivity_server_node_port = 30008 + } + } + vcenter { + resource_pool = "test-resource-pool" + datastore = "test-datastore" + datacenter = "test-datacenter" + cluster = "test-cluster" 
+ folder = "test-folder" + ca_cert_data = "test-ca-cert-data" + storage_policy_name = "test-storage-policy-name" + } + dataplane_v2 { + dataplane_v2_enabled = true + windows_dataplane_v2_enabled = true + advanced_networking = true + } + vm_tracking_enabled = true + enable_control_plane_v2 = true + disable_bundled_ingress = true + upgrade_policy { + control_plane_only = true + } + authorization { + admin_users { + username = "testuser@gmail.com" + } + } + anti_affinity_groups { + aag_config_disabled = true + } + auto_repair_config { + enabled = true + } + } +`, context) +} + +func testAccGkeonpremVmwareCluster_vmwareClusterUpdateManualLb(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + host_config { + dns_servers = ["10.254.41.1"] + ntp_servers = ["216.239.35.8"] + dns_search_domains = ["test-domain"] + } + static_ip_config { + ip_blocks { + netmask = "255.255.252.1" + gateway = "10.251.31.255" + ips { + ip = "10.251.30.154" + hostname = "test-hostname1-updated" + } + ips { + ip = "10.251.31.206" + hostname = "test-hostname2" + } + ips { + ip = "10.251.31.193" + hostname = "test-hostname3" + } + ips { + ip = "10.251.30.230" + hostname = "test-hostname4" + } + } + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + manual_lb_config { + ingress_http_node_port = 30006 + ingress_https_node_port = 30007 + control_plane_node_port = 30008 + konnectivity_server_node_port = 30009 + } + } + vcenter { 
+ resource_pool = "test-resource-pool-updated" + datastore = "test-datastore-updated" + datacenter = "test-datacenter-updated" + cluster = "test-cluster-updated" + folder = "test-folder-updated" + ca_cert_data = "test-ca-cert-data-updated" + storage_policy_name = "test-storage-policy-name-updated" + } + dataplane_v2 { + dataplane_v2_enabled = true + windows_dataplane_v2_enabled = true + advanced_networking = true + } + vm_tracking_enabled = false + enable_control_plane_v2 = false + disable_bundled_ingress = false + upgrade_policy { + control_plane_only = true + } + authorization { + admin_users { + username = "testuser-updated@gmail.com" + } + } + anti_affinity_groups { + aag_config_disabled = true + } + auto_repair_config { + enabled = true + } + } +`, context) +} diff --git a/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_node_pool_test.go b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_node_pool_test.go new file mode 100644 index 000000000000..0be6f8fa29a0 --- /dev/null +++ b/mmv1/third_party/terraform/services/gkeonprem/go/resource_gkeonprem_vmware_node_pool_test.go @@ -0,0 +1,212 @@ +package gkeonprem_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckGkeonpremVmwareNodePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"annotations"}, + }, + { + Config: testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context), + }, + { + ResourceName: "google_gkeonprem_vmware_node_pool.nodepool", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"annotations"}, + }, + }, + }) +} + +func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdateStart(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + dhcp_ip_config { + enabled = true + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + metal_lb_config { + address_pools { + pool = "ingress-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + address_pools { + pool = "lb-test-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + } + } + } + + resource "google_gkeonprem_vmware_node_pool" "nodepool" { + name = "tf-test-nodepool-%{random_suffix}" + location = "us-west1" + vmware_cluster = google_gkeonprem_vmware_cluster.cluster.name + annotations = { + env = "test" + } + config { + cpus = 4 + memory_mb = 8196 + replicas = 3 + image_type = "ubuntu_containerd" + image = "image" + boot_disk_size_gb = 10 + taints { + key = "key" + value = "value" + } + labels = {} + vsphere_config { + datastore = "test-datastore" + tags { + category = "test-category-1" + tag = "tag-1" + } + tags { + category = "test-category-2" + tag = "tag-2" + } + 
host_groups = ["host1", "host2"] + } + enable_load_balancer = true + } + node_pool_autoscaling { + min_replicas = 1 + max_replicas = 5 + } + } +`, context) +} + +func testAccGkeonpremVmwareNodePool_vmwareNodePoolUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` + + resource "google_gkeonprem_vmware_cluster" "cluster" { + name = "tf-test-cluster-%{random_suffix}" + location = "us-west1" + admin_cluster_membership = "projects/870316890899/locations/global/memberships/gkeonprem-terraform-test" + description = "test cluster" + on_prem_version = "1.13.1-gke.35" + annotations = {} + network_config { + service_address_cidr_blocks = ["10.96.0.0/12"] + pod_address_cidr_blocks = ["192.168.0.0/16"] + dhcp_ip_config { + enabled = true + } + } + control_plane_node { + cpus = 4 + memory = 8192 + replicas = 1 + } + load_balancer { + vip_config { + control_plane_vip = "10.251.133.5" + ingress_vip = "10.251.135.19" + } + metal_lb_config { + address_pools { + pool = "ingress-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + address_pools { + pool = "lb-test-ip" + manual_assign = "true" + addresses = ["10.251.135.19"] + avoid_buggy_ips = true + } + } + } + } + + resource "google_gkeonprem_vmware_node_pool" "nodepool" { + name = "tf-test-nodepool-%{random_suffix}" + location = "us-west1" + vmware_cluster = google_gkeonprem_vmware_cluster.cluster.name + annotations = { + env = "test-update" + } + config { + cpus = 5 + memory_mb = 4096 + replicas = 3 + image_type = "windows" + image = "image-updated" + boot_disk_size_gb = 12 + taints { + key = "key-updated" + value = "value-updated" + } + labels = {} + vsphere_config { + datastore = "test-datastore-update" + tags { + category = "test-category-3" + tag = "tag-3" + } + tags { + category = "test-category-4" + tag = "tag-4" + } + host_groups = ["host3", "host4"] + } + enable_load_balancer = false + } + node_pool_autoscaling { + min_replicas = 2 + max_replicas = 6 + } + } +`, 
context) +} diff --git a/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_fhir_store_test.go.tmpl b/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_fhir_store_test.go.tmpl new file mode 100644 index 000000000000..ec54cd6a1326 --- /dev/null +++ b/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_fhir_store_test.go.tmpl @@ -0,0 +1,232 @@ +package healthcare_test + +import ( + "fmt" + "path" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccHealthcareFhirStoreIdParsing(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + ImportId string + ExpectedError bool + ExpectedTerraformId string + ExpectedFhirStoreId string + Config *transport_tpg.Config + }{ + "id is in project/location/datasetName/fhirStoreName format": { + ImportId: "test-project/us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: "test-project/us-central1/test-dataset/test-store-name", + ExpectedFhirStoreId: "projects/test-project/locations/us-central1/datasets/test-dataset/fhirStores/test-store-name", + }, + "id is in domain:project/location/datasetName/fhirStoreName format": { + ImportId: "example.com:test-project/us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: "example.com:test-project/us-central1/test-dataset/test-store-name", + ExpectedFhirStoreId: "projects/example.com:test-project/locations/us-central1/datasets/test-dataset/fhirStores/test-store-name", + }, + "id is in location/datasetName/fhirStoreName format": { + ImportId: 
"us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: "test-project/us-central1/test-dataset/test-store-name", + ExpectedFhirStoreId: "projects/test-project/locations/us-central1/datasets/test-dataset/fhirStores/test-store-name", + Config: &transport_tpg.Config{Project: "test-project"}, + }, + "id is in location/datasetName/fhirStoreName format without project in config": { + ImportId: "us-central1/test-dataset/test-store-name", + ExpectedError: true, + Config: &transport_tpg.Config{Project: ""}, + }, + } + + for tn, tc := range cases { + fhirStoreId, err := healthcare.ParseHealthcareFhirStoreId(tc.ImportId, tc.Config) + + if tc.ExpectedError && err == nil { + t.Fatalf("bad: %s, expected an error", tn) + } + + if err != nil { + if tc.ExpectedError { + continue + } + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if fhirStoreId.TerraformId() != tc.ExpectedTerraformId { + t.Fatalf("bad: %s, expected Terraform ID to be `%s` but is `%s`", tn, tc.ExpectedTerraformId, fhirStoreId.TerraformId()) + } + + if fhirStoreId.FhirStoreId() != tc.ExpectedFhirStoreId { + t.Fatalf("bad: %s, expected FhirStore ID to be `%s` but is `%s`", tn, tc.ExpectedFhirStoreId, fhirStoreId.FhirStoreId()) + } + } +} + +func TestAccHealthcareFhirStore_basic(t *testing.T) { + t.Parallel() + + datasetName := fmt.Sprintf("tf-test-dataset-%s", acctest.RandString(t, 10)) + fhirStoreName := fmt.Sprintf("tf-test-fhir-store-%s", acctest.RandString(t, 10)) + pubsubTopic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) + resourceName := "google_healthcare_fhir_store.default" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckHealthcareFhirStoreDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleHealthcareFhirStore_basic(fhirStoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: 
true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testGoogleHealthcareFhirStore_update(fhirStoreName, datasetName, pubsubTopic), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleHealthcareFhirStoreUpdate(t, pubsubTopic), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testGoogleHealthcareFhirStore_basic(fhirStoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testGoogleHealthcareFhirStore_basic(fhirStoreName, datasetName string) string { + return fmt.Sprintf(` +resource "google_healthcare_fhir_store" "default" { + name = "%s" + dataset = google_healthcare_dataset.dataset.id + + enable_update_create = false + disable_referential_integrity = false + disable_resource_versioning = false + enable_history_import = false + version = "R4" +{{- if ne $.TargetVersionName "ga" }} + enable_history_modifications = false +{{- end }} +} + +resource "google_healthcare_dataset" "dataset" { + name = "%s" + location = "us-central1" +} +`, fhirStoreName, datasetName) +} + +func testGoogleHealthcareFhirStore_update(fhirStoreName, datasetName, pubsubTopic string) string { + return fmt.Sprintf(` +resource "google_healthcare_fhir_store" "default" { + name = "%s" + dataset = google_healthcare_dataset.dataset.id + + enable_update_create = true + version = "R4" + + + notification_configs { + pubsub_topic = google_pubsub_topic.topic.id + send_full_resource = true + send_previous_resource_on_delete = true + } +{{- if ne $.TargetVersionName "ga" }} + enable_history_modifications = true +{{- end }} + + labels = { + label1 = "labelvalue1" + } +} + +resource "google_healthcare_dataset" "dataset" { + name = "%s" + location = "us-central1" +} + 
+resource "google_pubsub_topic" "topic" { + name = "%s" +} +`, fhirStoreName, datasetName, pubsubTopic) +} + +func testAccCheckGoogleHealthcareFhirStoreUpdate(t *testing.T, pubsubTopic string) resource.TestCheckFunc { + return func(s *terraform.State) error { + var foundResource = false + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_healthcare_fhir_store" { + continue + } + foundResource = true + + config := acctest.GoogleProviderConfig(t) + + gcpResourceUri, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}dataset{{"}}"}}/fhirStores/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + response, err := config.NewHealthcareClient(config.UserAgent).Projects.Locations.Datasets.FhirStores.Get(gcpResourceUri).Do() + if err != nil { + return fmt.Errorf("Unexpected failure while verifying 'updated' dataset: %s", err) + } + + if !response.EnableUpdateCreate { + return fmt.Errorf("fhirStore 'EnableUpdateCreate' not updated: %s", gcpResourceUri) + } + + // because the GET for the FHIR store resource does not return the "enableHistoryImport" flag, this value + // will always be false and cannot be relied upon + + //if !response.EnableHistoryImport { + // return fmt.Errorf("fhirStore 'EnableHistoryImport' not updated: %s", gcpResourceUri) + //} + + if len(response.Labels) == 0 || response.Labels["label1"] != "labelvalue1" { + return fmt.Errorf("fhirStore labels not updated: %s", gcpResourceUri) + } + + notifications := response.NotificationConfigs + if len(notifications) > 0 { + topicName := path.Base(notifications[0].PubsubTopic) + if topicName != pubsubTopic { + return fmt.Errorf("fhirStore 'NotificationConfig' not updated ('%s' != '%s'): %s", topicName, pubsubTopic, gcpResourceUri) + } + } + } + + if !foundResource { + return fmt.Errorf("google_healthcare_fhir_store resource was missing") + } + return nil + } +} diff --git a/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_hl7_v2_store_test.go.tmpl 
b/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_hl7_v2_store_test.go.tmpl new file mode 100644 index 000000000000..d68e93b16a9b --- /dev/null +++ b/mmv1/third_party/terraform/services/healthcare/go/resource_healthcare_hl7_v2_store_test.go.tmpl @@ -0,0 +1,311 @@ +package healthcare_test + +import ( + "fmt" + "path" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/healthcare" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccHealthcareHl7V2StoreIdParsing(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + ImportId string + ExpectedError bool + ExpectedTerraformId string + ExpectedHl7V2StoreId string + Config *transport_tpg.Config + }{ + "id is in project/location/datasetName/hl7V2StoreName format": { + ImportId: "test-project/us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: "test-project/us-central1/test-dataset/test-store-name", + ExpectedHl7V2StoreId: "projects/test-project/locations/us-central1/datasets/test-dataset/hl7V2Stores/test-store-name", + }, + "id is in domain:project/location/datasetName/hl7V2StoreName format": { + ImportId: "example.com:test-project/us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: "example.com:test-project/us-central1/test-dataset/test-store-name", + ExpectedHl7V2StoreId: "projects/example.com:test-project/locations/us-central1/datasets/test-dataset/hl7V2Stores/test-store-name", + }, + "id is in location/datasetName/hl7V2StoreName format": { + ImportId: "us-central1/test-dataset/test-store-name", + ExpectedError: false, + ExpectedTerraformId: 
"test-project/us-central1/test-dataset/test-store-name", + ExpectedHl7V2StoreId: "projects/test-project/locations/us-central1/datasets/test-dataset/hl7V2Stores/test-store-name", + Config: &transport_tpg.Config{Project: "test-project"}, + }, + "id is in location/datasetName/hl7V2StoreName format without project in config": { + ImportId: "us-central1/test-dataset/test-store-name", + ExpectedError: true, + Config: &transport_tpg.Config{Project: ""}, + }, + } + + for tn, tc := range cases { + hl7V2StoreId, err := healthcare.ParseHealthcareHl7V2StoreId(tc.ImportId, tc.Config) + + if tc.ExpectedError && err == nil { + t.Fatalf("bad: %s, expected an error", tn) + } + + if err != nil { + if tc.ExpectedError { + continue + } + t.Fatalf("bad: %s, err: %#v", tn, err) + } + + if hl7V2StoreId.TerraformId() != tc.ExpectedTerraformId { + t.Fatalf("bad: %s, expected Terraform ID to be `%s` but is `%s`", tn, tc.ExpectedTerraformId, hl7V2StoreId.TerraformId()) + } + + if hl7V2StoreId.Hl7V2StoreId() != tc.ExpectedHl7V2StoreId { + t.Fatalf("bad: %s, expected Hl7V2Store ID to be `%s` but is `%s`", tn, tc.ExpectedHl7V2StoreId, hl7V2StoreId.Hl7V2StoreId()) + } + } +} + +func TestAccHealthcareHl7V2Store_basic(t *testing.T) { + t.Parallel() + + datasetName := fmt.Sprintf("tf-test-dataset-%s", acctest.RandString(t, 10)) + hl7_v2StoreName := fmt.Sprintf("tf-test-hl7_v2-store-%s", acctest.RandString(t, 10)) + pubsubTopic := fmt.Sprintf("tf-test-topic-%s", acctest.RandString(t, 10)) + resourceName := "google_healthcare_hl7_v2_store.default" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckHealthcareHl7V2StoreDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleHealthcareHl7V2Store_basic(hl7_v2StoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: 
[]string{"labels", "terraform_labels"}, + }, + { + Config: testGoogleHealthcareHl7V2Store_update(hl7_v2StoreName, datasetName, pubsubTopic), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleHealthcareHl7V2StoreUpdate(t, pubsubTopic), + ), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testGoogleHealthcareHl7V2Store_basic(hl7_v2StoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccHealthcareHl7V2Store_updateSchema(t *testing.T) { + t.Parallel() + + datasetName := fmt.Sprintf("tf-test-dataset-%s", acctest.RandString(t, 10)) + hl7_v2StoreName := fmt.Sprintf("tf-test-hl7_v2-store-%s", acctest.RandString(t, 10)) + resourceName := "google_healthcare_hl7_v2_store.default" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckHealthcareHl7V2StoreDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleHealthcareHl7V2Store_basicSchema(hl7_v2StoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testGoogleHealthcareHl7V2Store_updateSchema(hl7_v2StoreName, datasetName), + }, + { + ResourceName: resourceName, + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func testGoogleHealthcareHl7V2Store_basic(hl7_v2StoreName, datasetName string) string { + return fmt.Sprintf(` +resource "google_healthcare_hl7_v2_store" "default" { + name = "%s" + dataset = google_healthcare_dataset.dataset.id + reject_duplicate_message = true +} + +resource "google_healthcare_dataset" "dataset" { + name = "%s" + 
location = "us-central1" +} +`, hl7_v2StoreName, datasetName) +} + +func testGoogleHealthcareHl7V2Store_update(hl7_v2StoreName, datasetName, pubsubTopic string) string { + return fmt.Sprintf(` +resource "google_healthcare_hl7_v2_store" "default" { + name = "%s" + dataset = google_healthcare_dataset.dataset.id + + parser_config { + allow_null_header = true + segment_terminator = "Jw==" + } + + notification_configs { + pubsub_topic = google_pubsub_topic.topic.id + } + + labels = { + label1 = "labelvalue1" + } +} + +resource "google_healthcare_dataset" "dataset" { + name = "%s" + location = "us-central1" +} + +resource "google_pubsub_topic" "topic" { + name = "%s" +} +`, hl7_v2StoreName, datasetName, pubsubTopic) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testGoogleHealthcareHl7V2Store_basicSchema(hl7_v2StoreName, datasetName string) string { + return fmt.Sprintf(` +resource "google_healthcare_hl7_v2_store" "default" { + provider = google-beta + name = "%s" + dataset = google_healthcare_dataset.dataset.id + + parser_config { + schema = < 0 { + topicName := path.Base(notifications[0].PubsubTopic) + if topicName != pubsubTopic { + return fmt.Errorf("hl7_v2_store 'NotificationConfig' not updated ('%s' != '%s'): %s", topicName, pubsubTopic, gcpResourceUri) + } + } + } + + if !foundResource { + return fmt.Errorf("google_healthcare_hl7_v2_store resource was missing") + } + return nil + } +} diff --git a/mmv1/third_party/terraform/services/iam2/go/resource_iam_deny_policy_test.go b/mmv1/third_party/terraform/services/iam2/go/resource_iam_deny_policy_test.go new file mode 100644 index 000000000000..2cc5c3a9c80b --- /dev/null +++ b/mmv1/third_party/terraform/services/iam2/go/resource_iam_deny_policy_test.go @@ -0,0 +1,220 @@ +package iam2_test + +import ( + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + 
+func TestAccIAM2DenyPolicy_iamDenyPolicyUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAM2DenyPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAM2DenyPolicy_iamDenyPolicyUpdate(context), + }, + { + ResourceName: "google_iam_deny_policy.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "parent"}, + }, + { + Config: testAccIAM2DenyPolicy_iamDenyPolicyUpdate2(context), + }, + { + ResourceName: "google_iam_deny_policy.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "parent"}, + }, + { + Config: testAccIAM2DenyPolicy_iamDenyPolicyUpdate(context), + }, + { + ResourceName: "google_iam_deny_policy.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "parent"}, + }, + }, + }) +} + +func TestAccIAM2DenyPolicy_iamDenyPolicyFolderParent(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAM2DenyPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAM2DenyPolicy_iamDenyPolicyFolder(context), + }, + { + ResourceName: "google_iam_deny_policy.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "parent"}, + }, + { + Config: 
testAccIAM2DenyPolicy_iamDenyPolicyFolderUpdate(context), + }, + { + ResourceName: "google_iam_deny_policy.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "parent"}, + }, + }, + }) +} + +func testAccIAM2DenyPolicy_iamDenyPolicyUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = "tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_iam_deny_policy" "example" { + parent = urlencode("cloudresourcemanager.googleapis.com/projects/${google_project.project.project_id}") + name = "tf-test-my-deny-policy%{random_suffix}" + display_name = "A deny rule" + rules { + description = "First rule" + deny_rule { + denied_principals = ["principal://iam.googleapis.com/projects/-/serviceAccounts/${google_service_account.test-account.email}"] + denial_condition { + title = "Some expr" + expression = "!resource.matchTag('12345678/env', 'test')" + } + denied_permissions = ["cloudresourcemanager.googleapis.com/projects.update"] + } + } + rules { + description = "Second rule" + deny_rule { + denied_principals = ["principalSet://goog/public:all"] + denial_condition { + title = "Some expr" + expression = "!resource.matchTag('12345678/env', 'test')" + } + denied_permissions = ["cloudresourcemanager.googleapis.com/projects.update"] + exception_principals = ["principal://iam.googleapis.com/projects/-/serviceAccounts/${google_service_account.test-account.email}"] + } + } +} + +resource "google_service_account" "test-account" { + account_id = "tf-test-deny-account%{random_suffix}" + display_name = "Test Service Account" + project = google_project.project.project_id +} +`, context) +} + +func testAccIAM2DenyPolicy_iamDenyPolicyUpdate2(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "project" { + project_id = 
"tf-test%{random_suffix}" + name = "tf-test%{random_suffix}" + org_id = "%{org_id}" + billing_account = "%{billing_account}" +} + +resource "google_iam_deny_policy" "example" { + parent = urlencode("cloudresourcemanager.googleapis.com/projects/${google_project.project.project_id}") + name = "tf-test-my-deny-policy%{random_suffix}" + display_name = "A deny rule" + rules { + description = "Second rule" + deny_rule { + denied_principals = ["principalSet://goog/public:all"] + denial_condition { + title = "Some other expr" + expression = "!resource.matchTag('87654321/env', 'test')" + location = "/some/file" + description = "A denial condition" + } + denied_permissions = ["cloudresourcemanager.googleapis.com/projects.update"] + } + } +} + +resource "google_service_account" "test-account" { + account_id = "tf-test-deny-account%{random_suffix}" + display_name = "Test Service Account" + project = google_project.project.project_id +} +`, context) +} + +func testAccIAM2DenyPolicy_iamDenyPolicyFolder(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_deny_policy" "example" { + parent = urlencode("cloudresourcemanager.googleapis.com/${google_folder.folder.id}") + name = "tf-test-my-deny-policy%{random_suffix}" + display_name = "A deny rule" + rules { + description = "Second rule" + deny_rule { + denied_principals = ["principalSet://goog/public:all"] + denial_condition { + title = "Some expr" + expression = "!resource.matchTag('12345678/env', 'test')" + } + denied_permissions = ["cloudresourcemanager.googleapis.com/projects.delete"] + } + } +} + +resource "google_folder" "folder" { + display_name = "tf-test-%{random_suffix}" + parent = "organizations/%{org_id}" +} +`, context) +} + +func testAccIAM2DenyPolicy_iamDenyPolicyFolderUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_deny_policy" "example" { + parent = urlencode("cloudresourcemanager.googleapis.com/${google_folder.folder.id}") + name 
= "tf-test-my-deny-policy%{random_suffix}" + display_name = "A deny rule" + rules { + description = "Second rule" + deny_rule { + denied_principals = ["principalSet://goog/public:all"] + denied_permissions = ["cloudresourcemanager.googleapis.com/projects.delete"] + } + } +} + +resource "google_folder" "folder" { + display_name = "tf-test-%{random_suffix}" + parent = "organizations/%{org_id}" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool.go.tmpl new file mode 100644 index 000000000000..377d473692ce --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool.go.tmpl @@ -0,0 +1,44 @@ +package iambeta + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceIAMBetaWorkloadIdentityPool() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceIAMBetaWorkloadIdentityPool().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "workload_identity_pool_id") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceIAMBetaWorkloadIdentityPoolRead, + Schema: dsSchema, + } +} + +func dataSourceIAMBetaWorkloadIdentityPoolRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceIAMBetaWorkloadIdentityPoolRead(d, meta) + if err != nil { + 
return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider.go.tmpl new file mode 100644 index 000000000000..774f9a40fa89 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider.go.tmpl @@ -0,0 +1,45 @@ +package iambeta + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceIAMBetaWorkloadIdentityPoolProvider() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceIAMBetaWorkloadIdentityPoolProvider().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "workload_identity_pool_id") + tpgresource.AddRequiredFieldsToSchema(dsSchema, "workload_identity_pool_provider_id") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceIAMBetaWorkloadIdentityPoolProviderRead, + Schema: dsSchema, + } +} + +func dataSourceIAMBetaWorkloadIdentityPoolProviderRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/locations/global/workloadIdentityPools/{{"{{"}}workload_identity_pool_id{{"}}"}}/providers/{{"{{"}}workload_identity_pool_provider_id{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceIAMBetaWorkloadIdentityPoolProviderRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + + 
return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider_test.go.tmpl new file mode 100644 index 000000000000..2b9b50978b1f --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_provider_test.go.tmpl @@ -0,0 +1,61 @@ +package iambeta_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceIAMBetaWorkloadIdentityPoolProvider_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolProviderDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceIAMBetaWorkloadIdentityPoolProviderBasic(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_iam_workload_identity_pool_provider.foo", "google_iam_workload_identity_pool_provider.bar"), + ), + }, + }, + }) +} + +func testAccDataSourceIAMBetaWorkloadIdentityPoolProviderBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "pool" { + workload_identity_pool_id = "pool-%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_provider" "bar" { + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_provider_id = "bar-provider-%{random_suffix}" + display_name = "Name of provider" + description = "OIDC identity pool provider for 
automated test" + disabled = true + attribute_condition = "\"e968c2ef-047c-498d-8d79-16ca1b61e77e\" in assertion.groups" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + oidc { + allowed_audiences = ["https://example.com/gcp-oidc-federation"] + issuer_uri = "https://sts.windows.net/azure-tenant-id" + } + } + +data "google_iam_workload_identity_pool_provider" "foo" { + workload_identity_pool_id = google_iam_workload_identity_pool.pool.workload_identity_pool_id + workload_identity_pool_provider_id = google_iam_workload_identity_pool_provider.bar.workload_identity_pool_provider_id +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_test.go.tmpl new file mode 100644 index 000000000000..b34f354a625a --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/data_source_iam_workload_identity_pool_test.go.tmpl @@ -0,0 +1,47 @@ +package iambeta_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccDataSourceIAMBetaWorkloadIdentityPool_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccDataSourceIAMBetaWorkloadIdentityPoolBasic(context), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_iam_workload_identity_pool.foo", "google_iam_workload_identity_pool.bar"), + ), + }, + }, + }) +} + +func 
testAccDataSourceIAMBetaWorkloadIdentityPoolBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "bar" { + workload_identity_pool_id = "bar-pool-%{random_suffix}" + display_name = "Name of pool" + description = "Identity pool for automated test" + disabled = true +} + +data "google_iam_workload_identity_pool" "foo" { + workload_identity_pool_id = google_iam_workload_identity_pool.bar.workload_identity_pool_id +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_id_test.go.tmpl new file mode 100644 index 000000000000..900961532d80 --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_id_test.go.tmpl @@ -0,0 +1,36 @@ +package iambeta_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateIAMBetaWorkloadIdentityPoolId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "basic", Value: "foobar"}, + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foos"}, + {TestName: "long", Value: "12345678901234567890123456789012"}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "foo", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 33), 
ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkloadIdentityPool names: %v", es) + } +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_id_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_id_test.go.tmpl new file mode 100644 index 000000000000..fc3c42422a6a --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_id_test.go.tmpl @@ -0,0 +1,36 @@ +package iambeta_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iambeta" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateIAMBetaWorkloadIdentityPoolProviderId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "basic", Value: "foobar"}, + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foos"}, + {TestName: "long", Value: "12345678901234567890123456789012"}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "foo", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iambeta.ValidateWorkloadIdentityPoolProviderId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkloadIdentityPoolProvider names: %v", es) + } +} +{{- end }} diff --git 
a/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_test.go.tmpl b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_test.go.tmpl new file mode 100644 index 000000000000..062850fb5b6e --- /dev/null +++ b/mmv1/third_party/terraform/services/iambeta/go/resource_iam_workload_identity_pool_provider_test.go.tmpl @@ -0,0 +1,242 @@ +package iambeta_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccIAMBetaWorkloadIdentityPoolProvider_aws(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolProviderDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_aws_full(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_aws_enabled(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_aws_basic(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccIAMBetaWorkloadIdentityPoolProvider_oidc(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, 
+ ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMBetaWorkloadIdentityPoolProviderDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_oidc_full(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_oidc_update(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMBetaWorkloadIdentityPoolProvider_oidc_basic(context), + }, + { + ResourceName: "google_iam_workload_identity_pool_provider.my_provider", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccIAMBetaWorkloadIdentityPoolProvider_aws_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_provider" "my_provider" { + workload_identity_pool_id = google_iam_workload_identity_pool.my_pool.workload_identity_pool_id + workload_identity_pool_provider_id = "my-provider-%{random_suffix}" + display_name = "Name of provider" + description = "AWS identity pool provider for automated test" + disabled = true + attribute_condition = "attribute.aws_role==\"arn:aws:sts::999999999999:assumed-role/stack-eu-central-1-lambdaRole\"" + attribute_mapping = { + "google.subject" = "assertion.arn" + "attribute.aws_account" = "assertion.account" + "attribute.environment" = "assertion.arn.contains(\":instance-profile/Production\") ? 
\"prod\" : \"test\"" + } + aws { + account_id = "999999999999" + } +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolProvider_aws_enabled(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_provider" "my_provider" { + workload_identity_pool_id = google_iam_workload_identity_pool.my_pool.workload_identity_pool_id + workload_identity_pool_provider_id = "my-provider-%{random_suffix}" + display_name = "Name of provider" + description = "AWS identity pool provider for automated test" + disabled = false + attribute_condition = "attribute.aws_role==\"arn:aws:sts::999999999999:assumed-role/stack-eu-central-1-lambdaRole\"" + attribute_mapping = { + "google.subject" = "assertion.arn" + "attribute.aws_account" = "assertion.account" + "attribute.environment" = "assertion.arn.contains(\":instance-profile/Production\") ? 
\"prod\" : \"test\"" + } + aws { + account_id = "999999999999" + } +} +`, context) +} + +func testAccIAMBetaWorkloadIdentityPoolProvider_oidc_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workload_identity_pool" "my_pool" { + workload_identity_pool_id = "my-pool-%{random_suffix}" +} + +resource "google_iam_workload_identity_pool_provider" "my_provider" { + workload_identity_pool_id = google_iam_workload_identity_pool.my_pool.workload_identity_pool_id + workload_identity_pool_provider_id = "my-provider-%{random_suffix}" + display_name = "Name of provider" + description = "OIDC identity pool provider for automated test" + disabled = true + attribute_condition = "\"e968c2ef-047c-498d-8d79-16ca1b61e77e\" in assertion.groups" + attribute_mapping = { + "google.subject" = "\"azure::\" + assertion.tid + \"::\" + assertion.sub" + "attribute.tid" = "assertion.tid" + "attribute.managed_identity_name" = < MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g
3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + display_name = "Display name" + description = "A sample SAML workforce pool provider." + disabled = false + attribute_condition = "true" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_saml_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject": "false" + } + saml { + idp_metadata_xml = " MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSL
I7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } + display_name = "New Display name" + description = "A sample SAML workforce pool provider with updated description." + disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_saml_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + saml { + idp_metadata_xml = " 
MIIDpDCCAoygAwIBAgIGAX7/5qPhMA0GCSqGSIb3DQEBCwUAMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNU2FuIEZyYW5jaXNjbzENMAsGA1UECgwET2t0YTEUMBIGA1UECwwLU1NPUHJvdmlkZXIxEzARBgNVBAMMCmRldi00NTg0MjExHDAaBgkqhkiG9w0BCQEWDWluZm9Ab2t0YS5jb20wHhcNMjIwMjE2MDAxOTEyWhcNMzIwMjE2MDAyMDEyWjCBkjELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDVNhbiBGcmFuY2lzY28xDTALBgNVBAoMBE9rdGExFDASBgNVBAsMC1NTT1Byb3ZpZGVyMRMwEQYDVQQDDApkZXYtNDU4NDIxMRwwGgYJKoZIhvcNAQkBFg1pbmZvQG9rdGEuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxrBl7GKz52cRpxF9xCsirnRuMxnhFBaUrsHqAQrLqWmdlpNYZTVg+T9iQ+aq/iE68L+BRZcZniKIvW58wqqS0ltXVvIkXuDSvnvnkkI5yMIVErR20K8jSOKQm1FmK+fgAJ4koshFiu9oLiqu0Ejc0DuL3/XRsb4RuxjktKTb1khgBBtb+7idEk0sFR0RPefAweXImJkDHDm7SxjDwGJUubbqpdTxasPr0W+AHI1VUzsUsTiHAoyb0XDkYqHfDzhj/ZdIEl4zHQ3bEZvlD984ztAnmX2SuFLLKfXeAAGHei8MMixJvwxYkkPeYZ/5h8WgBZPP4heS2CPjwYExt29L8QIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQARjJFz++a9Z5IQGFzsZMrX2EDR5ML4xxUiQkbhld1S1PljOLcYFARDmUC2YYHOueU4ee8Jid9nPGEUebV/4Jok+b+oQh+dWMgiWjSLI7h5q4OYZ3VJtdlVwgMFt2iz+/4yBKMUZ50g3Qgg36vE34us+eKitg759JgCNsibxn0qtJgSPm0sgP2L6yTaLnoEUbXBRxCwynTSkp9ZijZqEzbhN0e2dWv7Rx/nfpohpDP6vEiFImKFHpDSv3M/5de1ytQzPFrZBYt9WlzlYwE1aD9FHCxdd+rWgYMVVoRaRmndpV/Rq3QUuDuFJtaoX11bC7ExkOpg9KstZzA63i3VcfYv" + } +} +`, context) +} + + +func testAccIAMWorkforcePoolWorkforcePoolProvider_extraAttributesOauth2Client_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "assertion.sub" + } + oidc { + issuer_uri = "https://sts.windows.net/826602fe-2101-470c-9d71-ee1343668989/" + client_id = 
"https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + extra_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_MAIL" + query_parameters { + filter = "mail:gcp" + } + } + display_name = "Display name" + description = "A sample OIDC workforce pool provider." + disabled = false + attribute_condition = "true" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_extraAttributesOauth2Client_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://sts.windows.net/826602fe-2101-470c-9d71-ee1343668989/" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + extra_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0/" + client_id = "new-client-id" + client_secret { + value { + plain_text = 
"new-client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_MAIL" + query_parameters { + filter = "displayName:gcp" + } + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." + disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_extraAttributesOauth2Client_update_clearConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://sts.windows.net/826602fe-2101-470c-9d71-ee1343668989/" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." 
+ disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_extraAttributesOauth2Client_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} + +resource "google_iam_workforce_pool_provider" "my_provider" { + workforce_pool_id = google_iam_workforce_pool.my_pool.workforce_pool_id + location = google_iam_workforce_pool.my_pool.location + provider_id = "my-provider-%{random_suffix}" + attribute_mapping = { + "google.subject" = "false" + } + oidc { + issuer_uri = "https://sts.windows.net/826602fe-2101-470c-9d71-ee1343668989/" + client_id = "https://analysis.windows.net/powerbi/connector/GoogleBigQuery" + client_secret { + value { + plain_text = "client-secret" + } + } + web_sso_config { + response_type = "CODE" + assertion_claims_behavior = "MERGE_USER_INFO_OVER_ID_TOKEN_CLAIMS" + additional_scopes = ["groups", "roles"] + } + } + extra_attributes_oauth2_client { + issuer_uri = "https://login.microsoftonline.com/826602fe-2101-470c-9d71-ee1343668989/v2.0" + client_id = "client-id" + client_secret { + value { + plain_text = "client-secret" + } + } + attributes_type = "AZURE_AD_GROUPS_MAIL" + } + display_name = "New Display name" + description = "A sample OIDC workforce pool provider with updated description." 
+ disabled = true + attribute_condition = "false" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePoolProvider_destroy(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_test.go new file mode 100644 index 000000000000..62cf132b5941 --- /dev/null +++ b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_test.go @@ -0,0 +1,138 @@ +package iamworkforcepool_test + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccIAMWorkforcePoolWorkforcePool_full(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMWorkforcePoolWorkforcePool_full(context), + }, + { + ResourceName: "google_iam_workforce_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePool_full_update(context), + }, + { + ResourceName: "google_iam_workforce_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccIAMWorkforcePoolWorkforcePool_minimal(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + 
"org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckIAMWorkforcePoolWorkforcePoolDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccIAMWorkforcePoolWorkforcePool_minimal(context), + }, + { + ResourceName: "google_iam_workforce_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccIAMWorkforcePoolWorkforcePool_minimal_update(context), + }, + { + ResourceName: "google_iam_workforce_pool.my_pool", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccIAMWorkforcePoolWorkforcePool_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" + display_name = "Display name" + description = "A sample workforce pool." + disabled = false + session_duration = "7200s" + access_restrictions { + allowed_services { + domain = "backstory.chronicle.security" + } + disable_programmatic_signin = false + } +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePool_minimal(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePool_full_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" + display_name = "New display name" + description = "A sample workforce pool with updated description." 
+ disabled = true + session_duration = "3600s" + access_restrictions { + allowed_services { + domain = "backstory.chronicle.security" + } + disable_programmatic_signin = false + } +} +`, context) +} + +func testAccIAMWorkforcePoolWorkforcePool_minimal_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_iam_workforce_pool" "my_pool" { + workforce_pool_id = "my-pool-%{random_suffix}" + parent = "organizations/%{org_id}" + location = "global" + display_name = "New display name" + description = "A sample workforce pool with updated description." + disabled = true + session_duration = "3600s" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_id_test.go new file mode 100644 index 000000000000..bb24645a4539 --- /dev/null +++ b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_id_test.go @@ -0,0 +1,35 @@ +package iamworkforcepool_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateIAMWorkforcePoolWorkforcePoolId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foobar"}, + {TestName: "long", Value: strings.Repeat("f", 63)}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: 
"too short", Value: "foooo", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 64), ExpectError: true}, + {TestName: "doesn't start with a lowercase letter", Value: "123foo", ExpectError: true}, + {TestName: "ends with a hyphen", Value: "foobar-", ExpectError: true}, + } + + es := verify.TestStringValidationCases(x, iamworkforcepool.ValidateWorkforcePoolId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkforcePool names: %v", es) + } +} diff --git a/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_provider_id_test.go b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_provider_id_test.go new file mode 100644 index 000000000000..c8425098357e --- /dev/null +++ b/mmv1/third_party/terraform/services/iamworkforcepool/go/resource_iam_workforce_pool_workforce_pool_provider_id_test.go @@ -0,0 +1,33 @@ +package iamworkforcepool_test + +import ( + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/services/iamworkforcepool" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +func TestValidateIAMWorkforcePoolWorkforcePoolProviderId(t *testing.T) { + x := []verify.StringValidationTestCase{ + // No errors + {TestName: "with numbers", Value: "foobar123"}, + {TestName: "short", Value: "foo-"}, + {TestName: "long", Value: strings.Repeat("f", 32)}, + {TestName: "has a hyphen", Value: "foo-bar"}, + + // With errors + {TestName: "empty", Value: "", ExpectError: true}, + {TestName: "starts with a gcp-", Value: "gcp-foobar", ExpectError: true}, + {TestName: "with uppercase", Value: "fooBar", ExpectError: true}, + {TestName: "has an slash", Value: "foo/bar", ExpectError: true}, + {TestName: "has an backslash", Value: "foo\bar", ExpectError: true}, + {TestName: "too short", Value: "foo", ExpectError: true}, + {TestName: "too long", Value: strings.Repeat("f", 33), ExpectError: true}, + } + + es := 
verify.TestStringValidationCases(x, iamworkforcepool.ValidateWorkforcePoolProviderId) + if len(es) > 0 { + t.Errorf("Failed to validate WorkforcePoolProvider names: %v", es) + } +} diff --git a/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric.go.tmpl b/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric.go.tmpl new file mode 100644 index 000000000000..e43bd4cbcd6f --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric.go.tmpl @@ -0,0 +1,156 @@ +package kms + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "context" + "encoding/base64" + "fmt" + "hash/crc32" + "regexp" + "strconv" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudkms/v1" +) + +var ( + cryptoKeyVersionRegexp = regexp.MustCompile(`^(//[^/]*/[^/]*/)?(projects/[^/]+/locations/[^/]+/keyRings/[^/]+/cryptoKeys/[^/]+/cryptoKeyVersions/[^/]+)$`) +) + +func DataSourceGoogleKmsSecretAsymmetric() *schema.Resource { + return &schema.Resource{ + ReadContext: dataSourceGoogleKmsSecretAsymmetricReadContext, + Schema: map[string]*schema.Schema{ + "crypto_key_version": { + Type: schema.TypeString, + Description: "The fully qualified KMS crypto key version name", + ValidateFunc: verify.ValidateRegexp(cryptoKeyVersionRegexp.String()), + Required: true, + }, + "ciphertext": { + Type: schema.TypeString, + Description: "The public key encrypted ciphertext in base64 encoding", + ValidateFunc: validateBase64WithWhitespaces, + Required: true, + }, + "crc32": { + Type: schema.TypeString, + Description: "The crc32 checksum of the ciphertext, hexadecimal encoding. 
If not specified, it will be computed", + ValidateFunc: validateHexadecimalUint32, + Optional: true, + }, + "plaintext": { + Type: schema.TypeString, + Computed: true, + Sensitive: true, + }, + }, + } +} + +func dataSourceGoogleKmsSecretAsymmetricReadContext(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { + var diags diag.Diagnostics + + err := dataSourceGoogleKmsSecretAsymmetricRead(ctx, d, meta) + if err != nil { + diags = diag.FromErr(err) + } + return diags +} + +func dataSourceGoogleKmsSecretAsymmetricRead(ctx context.Context, d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + // `google_kms_crypto_key_version` returns an id with the prefix + // //cloudkms.googleapis.com/v1, which is an invalid name. To allow for the most elegant + // configuration, we will allow it as an input. + keyVersion := cryptoKeyVersionRegexp.FindStringSubmatch(d.Get("crypto_key_version").(string)) + cryptoKeyVersion := keyVersion[len(keyVersion)-1] + + base64CipherText := removeWhiteSpaceFromString(d.Get("ciphertext").(string)) + ciphertext, err := base64.StdEncoding.DecodeString(base64CipherText) + if err != nil { + return err + } + + crc32c := func(data []byte) uint32 { + t := crc32.MakeTable(crc32.Castagnoli) + return crc32.Checksum(data, t) + } + + ciphertextCRC32C := crc32c(ciphertext) + if s, ok := d.Get("crc32").(string); ok && s != "" { + u, err := strconv.ParseUint(s, 16, 32) + if err != nil { + return fmt.Errorf("failed to convert crc32 into uint32, %s", err) + } + ciphertextCRC32C = uint32(u) + } else { + if err := d.Set("crc32", fmt.Sprintf("%x", ciphertextCRC32C)); err != nil { + return fmt.Errorf("failed to set crc32, %s", err) + } + } + + req := cloudkms.AsymmetricDecryptRequest{ + Ciphertext: base64CipherText, + CiphertextCrc32c: int64(ciphertextCRC32C)} + + client := 
config.NewKmsClientWithCtx(ctx, userAgent) + if client == nil { + return fmt.Errorf("failed to get a KMS client") + } + + result, err := client.Projects.Locations.KeyRings.CryptoKeys.CryptoKeyVersions.AsymmetricDecrypt(cryptoKeyVersion, &req).Do() + if err != nil { + return fmt.Errorf("failed to decrypt ciphertext: %v", err) + } + plaintext, err := base64.StdEncoding.DecodeString(result.Plaintext) + if err != nil { + return fmt.Errorf("failed to base64 decode plaintext: %v", err) + } + + plaintextCrc32c := int64(crc32c(plaintext)) + if !result.VerifiedCiphertextCrc32c || plaintextCrc32c != result.PlaintextCrc32c { + return fmt.Errorf("asymmetricDecrypt response corrupted in-transit, got %x, expected %x", + plaintextCrc32c, result.PlaintextCrc32c) + } + + if err := d.Set("plaintext", string(plaintext)); err != nil { + return fmt.Errorf("error setting plaintext: %s", err) + } + + d.SetId(fmt.Sprintf("%s:%x:%s", cryptoKeyVersion, ciphertextCRC32C, base64CipherText)) + return nil +} + +func removeWhiteSpaceFromString(s string) string { + whitespaceRegexp := regexp.MustCompile(`(?m)[\s]+`) + return whitespaceRegexp.ReplaceAllString(s, "") +} + +func validateBase64WithWhitespaces(i interface{}, val string) ([]string, []error) { + _, err := base64.StdEncoding.DecodeString(removeWhiteSpaceFromString(i.(string))) + if err != nil { + return nil, []error{fmt.Errorf("could not decode %q as a valid base64 value. 
Please use the terraform base64 functions such as base64encode() or filebase64() to supply a valid base64 string", val)} + } + return nil, nil +} + +func validateHexadecimalUint32(i interface{}, val string) ([]string, []error) { + _, err := strconv.ParseUint(i.(string), 16, 32) + if err != nil { + return nil, []error{fmt.Errorf("could not decode %q as a unsigned 32 bit hexadecimal integer", val)} + } + return nil, nil +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric_test.go.tmpl b/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric_test.go.tmpl new file mode 100644 index 000000000000..e27f012190d4 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/data_source_google_kms_secret_asymmetric_test.go.tmpl @@ -0,0 +1,159 @@ +package kms_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "encoding/base64" + "encoding/pem" + "fmt" + "hash/crc32" + "log" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccKmsSecretAsymmetricBasic(t *testing.T) { + // Nested tests confuse VCR + acctest.SkipIfVcr(t) + t.Parallel() + + projectOrg := envvar.GetTestOrgFromEnv(t) + projectBillingAccount := envvar.GetTestBillingAccountFromEnv(t) + + projectID := "tf-test-" + acctest.RandString(t, 10) + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + plaintext := fmt.Sprintf("secret-%s", acctest.RandString(t, 10)) + + // The first test creates resources needed to encrypt plaintext and produce ciphertext + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: kmsCryptoKeyAsymmetricDecryptBasic(projectID, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName), + Check: func(s *terraform.State) error { + ciphertext, cryptoKeyVersionID, crc, err := testAccEncryptSecretDataAsymmetricWithPublicKey(t, s, "data.google_kms_crypto_key_version.crypto_key", plaintext) + if err != nil { + return err + } + + // The second test asserts that the data source has the correct plaintext, given the created ciphertext + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: googleKmsSecretAsymmetricDatasource(cryptoKeyVersionID, ciphertext), + Check: resource.TestCheckResourceAttr("data.google_kms_secret_asymmetric.acceptance", "plaintext", plaintext), + }, + { + Config: googleKmsSecretAsymmetricDatasourceWithCrc(cryptoKeyVersionID, ciphertext, crc), + Check: resource.TestCheckResourceAttr("data.google_kms_secret_asymmetric.acceptance_with_crc", "plaintext", plaintext), + }, + }, + }) + + return nil + }, + }, + }, + }) +} + +func testAccEncryptSecretDataAsymmetricWithPublicKey(t *testing.T, s *terraform.State, cryptoKeyResourceName, plaintext string) (string, string, uint32, error) { + rs, ok := s.RootModule().Resources[cryptoKeyResourceName] + if !ok { + return "", "", 0, fmt.Errorf("resource not found: %s", cryptoKeyResourceName) + } + + cryptoKeyVersionID := rs.Primary.Attributes["id"] + + block, _ := pem.Decode([]byte(rs.Primary.Attributes["public_key.0.pem"])) + publicKey, err := x509.ParsePKIXPublicKey(block.Bytes) + if err != nil { + return "", "", 0, fmt.Errorf("failed to parse public key: %v", err) + } + rsaKey, ok := publicKey.(*rsa.PublicKey) + if !ok { + return "", "", 0, fmt.Errorf("public key is not rsa") + } + + ciphertext, err := rsa.EncryptOAEP(sha256.New(), rand.Reader, 
rsaKey, []byte(plaintext), nil) + if err != nil { + return "", "", 0, fmt.Errorf("rsa.EncryptOAEP: %v", err) + } + + crc := crc32.Checksum(ciphertext, crc32.MakeTable(crc32.Castagnoli)) + + result := base64.StdEncoding.EncodeToString(ciphertext) + log.Printf("[INFO] Successfully encrypted plaintext and got ciphertext: %s", result) + + return result, cryptoKeyVersionID, crc, nil +} + +func googleKmsSecretAsymmetricDatasource(cryptoKeyTerraformID, ciphertext string) string { + return fmt.Sprintf(` +data "google_kms_secret_asymmetric" "acceptance" { + crypto_key_version = "%s" + ciphertext = "%s" +} +`, cryptoKeyTerraformID, ciphertext) +} + +func googleKmsSecretAsymmetricDatasourceWithCrc(cryptoKeyTerraformID, ciphertext string, crc uint32) string { + return fmt.Sprintf(` +data "google_kms_secret_asymmetric" "acceptance_with_crc" { + crypto_key_version = "%s" + ciphertext = "%s" + crc32 = "%x" +} +`, cryptoKeyTerraformID, ciphertext, crc) +} + +func kmsCryptoKeyAsymmetricDecryptBasic(projectID, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName string) string { + return fmt.Sprintf(` +resource "google_project" "acceptance" { + name = "%s" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.acceptance.project + name = "%s" + location = "us-central1" + depends_on = [google_project_service.acceptance] +} + +resource "google_kms_crypto_key" "crypto_key" { + name = "%s" + key_ring = google_kms_key_ring.key_ring.id + purpose = "ASYMMETRIC_DECRYPT" + version_template { + algorithm = "RSA_DECRYPT_OAEP_4096_SHA256" + } +} + +data "google_kms_crypto_key_version" "crypto_key" { + crypto_key = google_kms_crypto_key.crypto_key.id +} +`, projectID, projectID, projectOrg, projectBillingAccount, keyRingName, cryptoKeyName) +} + +{{ end }} 
diff --git a/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key.go.tmpl b/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key.go.tmpl new file mode 100644 index 000000000000..9b765b23ffe9 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key.go.tmpl @@ -0,0 +1,110 @@ +package kms + +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgiamresource" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudkms/v1" + "google.golang.org/api/cloudresourcemanager/v1" +) + +var IamKmsCryptoKeySchema = map[string]*schema.Schema{ + "crypto_key_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, +} + +type KmsCryptoKeyIamUpdater struct { + resourceId string + d tpgresource.TerraformResourceData + Config *transport_tpg.Config +} + +func NewKmsCryptoKeyIamUpdater(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgiamresource.ResourceIamUpdater, error) { + cryptoKey := d.Get("crypto_key_id").(string) + cryptoKeyId, err := ParseKmsCryptoKeyId(cryptoKey, config) + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Error parsing resource ID for %s: {{"{{"}}err{{"}}"}}", cryptoKey), err) + } + + return &KmsCryptoKeyIamUpdater{ + resourceId: cryptoKeyId.CryptoKeyId(), + d: d, + Config: config, + }, nil +} + +func CryptoIdParseFunc(d *schema.ResourceData, config *transport_tpg.Config) error { + cryptoKeyId, err := ParseKmsCryptoKeyId(d.Id(), config) + if err != nil { + return err + } + if err := d.Set("crypto_key_id", cryptoKeyId.CryptoKeyId()); err != nil { + return fmt.Errorf("Error setting crypto_key_id: %s", err) + } + d.SetId(cryptoKeyId.CryptoKeyId()) + return nil +} + +func (u *KmsCryptoKeyIamUpdater) GetResourceIamPolicy() 
(*cloudresourcemanager.Policy, error) { + userAgent, err := tpgresource.GenerateUserAgentString(u.d, u.Config.UserAgent) + if err != nil { + return nil, err + } + + p, err := u.Config.NewKmsClient(userAgent).Projects.Locations.KeyRings.CryptoKeys.GetIamPolicy(u.resourceId).OptionsRequestedPolicyVersion(tpgiamresource.IamPolicyVersion).Do() + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Error retrieving IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + cloudResourcePolicy, err := kmsToResourceManagerPolicy(p) + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Invalid IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + return cloudResourcePolicy, nil +} + +func (u *KmsCryptoKeyIamUpdater) SetResourceIamPolicy(policy *cloudresourcemanager.Policy) error { + userAgent, err := tpgresource.GenerateUserAgentString(u.d, u.Config.UserAgent) + if err != nil { + return err + } + + kmsPolicy, err := resourceManagerToKmsPolicy(policy) + + if err != nil { + return errwrap.Wrapf(fmt.Sprintf("Invalid IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + _, err = u.Config.NewKmsClient(userAgent).Projects.Locations.KeyRings.CryptoKeys.SetIamPolicy(u.resourceId, &cloudkms.SetIamPolicyRequest{ + Policy: kmsPolicy, + }).Do() + + if err != nil { + return errwrap.Wrapf(fmt.Sprintf("Error setting IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + return nil +} + +func (u *KmsCryptoKeyIamUpdater) GetResourceId() string { + return u.resourceId +} + +func (u *KmsCryptoKeyIamUpdater) GetMutexKey() string { + return fmt.Sprintf("iam-kms-crypto-key-%s", u.resourceId) +} + +func (u *KmsCryptoKeyIamUpdater) DescribeResource() string { + return fmt.Sprintf("KMS CryptoKey %q", u.resourceId) +} diff --git a/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key_test.go.tmpl b/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key_test.go.tmpl new file mode 100644 
index 000000000000..29708dc3ba80 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/iam_kms_crypto_key_test.go.tmpl @@ -0,0 +1,717 @@ +package kms_test + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/services/kms" +) + +func TestAccKmsCryptoKeyIamBinding(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyDecrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + // Test Iam Binding creation + Config: testAccKmsCryptoKeyIamBinding_basic(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId), + Check: testAccCheckGoogleKmsCryptoKeyIamBindingExists(t, "foo", roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_crypto_key_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s/%s %s", keyRingId.TerraformId(), cryptoKeyName, roleId), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccKmsCryptoKeyIamBinding_update(projectId, orgId, billingAccount, 
account, keyRingName, cryptoKeyName, roleId), + Check: testAccCheckGoogleKmsCryptoKeyIamBindingExists(t, "foo", roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_crypto_key_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s/%s %s", keyRingId.TerraformId(), cryptoKeyName, roleId), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsCryptoKeyIamBinding_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyDecrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsCryptoKeyIamBinding_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle), + }, + { + ResourceName: "google_kms_crypto_key_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s/%s %s %s", keyRingId.TerraformId(), cryptoKeyName, roleId, conditionTitle), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func TestAccKmsCryptoKeyIamMember(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + 
billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccKmsCryptoKeyIamMember_basic(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId), + Check: testAccCheckGoogleKmsCryptoKeyIamMemberExists(t, "foo", roleId, + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + ), + }, + { + ResourceName: "google_kms_crypto_key_iam_member.foo", + ImportStateId: fmt.Sprintf("%s/%s %s serviceAccount:%s@%s.iam.gserviceaccount.com", keyRingId.TerraformId(), cryptoKeyName, roleId, account, projectId), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsCryptoKeyIamMember_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + acctest.VcrTest(t, resource.TestCase{ 
+ PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsCryptoKeyIamMember_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle), + }, + { + ResourceName: "google_kms_crypto_key_iam_member.foo", + ImportStateId: fmt.Sprintf("%s/%s %s serviceAccount:%s@%s.iam.gserviceaccount.com %s", keyRingId.TerraformId(), cryptoKeyName, roleId, account, projectId, conditionTitle), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func TestAccKmsCryptoKeyIamPolicy(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsCryptoKeyIamPolicy_basic(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId), + Check: resource.ComposeTestCheckFunc( + testAccCheckGoogleCryptoKmsKeyIam(t, "foo", roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + }), + resource.TestCheckResourceAttrSet("data.google_kms_crypto_key_iam_policy.foo", "policy_data"), + ), + }, + { + ResourceName: "google_kms_crypto_key_iam_policy.foo", + ImportStateId: fmt.Sprintf("%s/%s", keyRingId.TerraformId(), cryptoKeyName), + ImportState: true, + 
ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsCryptoKeyIamPolicy_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + cryptoKeyName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsCryptoKeyIamPolicy_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle), + }, + { + ResourceName: "google_kms_crypto_key_iam_policy.foo", + ImportStateId: fmt.Sprintf("%s/%s", keyRingId.TerraformId(), cryptoKeyName), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func testAccCheckGoogleKmsCryptoKeyIamBindingExists(t *testing.T, bindingResourceName, roleId string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + bindingRs, ok := s.RootModule().Resources[fmt.Sprintf("google_kms_crypto_key_iam_binding.%s", bindingResourceName)] + if !ok { + return fmt.Errorf("Not found: %s", bindingResourceName) + } + + config := acctest.GoogleProviderConfig(t) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(bindingRs.Primary.Attributes["crypto_key_id"], config) + + if err != nil { + return err + } + + p, err := 
config.NewKmsClient(config.UserAgent).Projects.Locations.KeyRings.CryptoKeys.GetIamPolicy(cryptoKeyId.CryptoKeyId()).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == roleId { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", roleId) + } +} + +func testAccCheckGoogleKmsCryptoKeyIamMemberExists(t *testing.T, n, role, member string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources["google_kms_crypto_key_iam_member."+n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + config := acctest.GoogleProviderConfig(t) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) + + if err != nil { + return err + } + + p, err := config.NewKmsClient(config.UserAgent).Projects.Locations.KeyRings.GetIamPolicy(cryptoKeyId.CryptoKeyId()).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + for _, m := range binding.Members { + if m == member { + return nil + } + } + + return fmt.Errorf("Missing member %q, got %v", member, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +func testAccCheckGoogleCryptoKmsKeyIam(t *testing.T, n, role string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources["google_kms_crypto_key_iam_policy."+n] + if !ok { + return fmt.Errorf("IAM policy resource not found") + } + + config := acctest.GoogleProviderConfig(t) + cryptoKeyId, err := kms.ParseKmsCryptoKeyId(rs.Primary.Attributes["crypto_key_id"], config) + + if err != nil { + return err + } + + p, err := 
config.NewKmsClient(config.UserAgent).Projects.Locations.KeyRings.GetIamPolicy(cryptoKeyId.CryptoKeyId()).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } else { + return fmt.Errorf("Binding found but not expected for role: %v", binding.Role) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +// We are using a custom role since iam_binding is authoritative on the member list and +// we want to avoid removing members from an existing role to prevent unwanted side effects. +func testAccKmsCryptoKeyIamBinding_basic(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +resource "google_kms_crypto_key_iam_binding" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] +} +`, projectId, orgId, 
billingAccount, account, keyRingName, cryptoKeyName, roleId) +} + +func testAccKmsCryptoKeyIamBinding_update(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_service_account" "test_account_2" { + project = google_project_service.iam.project + account_id = "%s-2" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +resource "google_kms_crypto_key_iam_binding" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "%s" + members = [ + "serviceAccount:${google_service_account.test_account.email}", + "serviceAccount:${google_service_account.test_account_2.email}", + ] +} +`, projectId, orgId, billingAccount, account, account, keyRingName, cryptoKeyName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsCryptoKeyIamBinding_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource 
"google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +resource "google_kms_crypto_key_iam_binding" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } +} +`, projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle) +} +{{- end }} + +func testAccKmsCryptoKeyIamMember_basic(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = 
"us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +resource "google_kms_crypto_key_iam_member" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" +} +`, projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsCryptoKeyIamMember_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +resource "google_kms_crypto_key_iam_member" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } +} +`, projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle) +} +{{- end }} + +func testAccKmsCryptoKeyIamPolicy_basic(projectId, 
orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] + } +} + +resource "google_kms_crypto_key_iam_policy" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + policy_data = data.google_iam_policy.foo.policy_data +} + +data "google_kms_crypto_key_iam_policy" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id +} +`, projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsCryptoKeyIamPolicy_withCondition(projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = 
google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Crypto Key Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_crypto_key" "crypto_key" { + key_ring = google_kms_key_ring.key_ring.id + name = "%s" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } + } +} + +resource "google_kms_crypto_key_iam_policy" "foo" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + policy_data = data.google_iam_policy.foo.policy_data +} +`, projectId, orgId, billingAccount, account, keyRingName, cryptoKeyName, roleId, conditionTitle) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring.go.tmpl b/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring.go.tmpl new file mode 100644 index 000000000000..44584b043a75 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring.go.tmpl @@ -0,0 +1,129 @@ +package kms + +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgiamresource" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudkms/v1" + "google.golang.org/api/cloudresourcemanager/v1" +) + +var IamKmsKeyRingSchema = map[string]*schema.Schema{ + "key_ring_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, +} + +type 
KmsKeyRingIamUpdater struct { + resourceId string + d tpgresource.TerraformResourceData + Config *transport_tpg.Config +} + +func NewKmsKeyRingIamUpdater(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (tpgiamresource.ResourceIamUpdater, error) { + keyRing := d.Get("key_ring_id").(string) + keyRingId, err := parseKmsKeyRingId(keyRing, config) + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Error parsing resource ID for %s: {{"{{"}}err{{"}}"}}", keyRing), err) + } + + return &KmsKeyRingIamUpdater{ + resourceId: keyRingId.KeyRingId(), + d: d, + Config: config, + }, nil +} + +func KeyRingIdParseFunc(d *schema.ResourceData, config *transport_tpg.Config) error { + keyRingId, err := parseKmsKeyRingId(d.Id(), config) + if err != nil { + return err + } + + if err := d.Set("key_ring_id", keyRingId.KeyRingId()); err != nil { + return fmt.Errorf("Error setting key_ring_id: %s", err) + } + d.SetId(keyRingId.KeyRingId()) + return nil +} + +func (u *KmsKeyRingIamUpdater) GetResourceIamPolicy() (*cloudresourcemanager.Policy, error) { + userAgent, err := tpgresource.GenerateUserAgentString(u.d, u.Config.UserAgent) + if err != nil { + return nil, err + } + + p, err := u.Config.NewKmsClient(userAgent).Projects.Locations.KeyRings.GetIamPolicy(u.resourceId).OptionsRequestedPolicyVersion(tpgiamresource.IamPolicyVersion).Do() + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Error retrieving IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + cloudResourcePolicy, err := kmsToResourceManagerPolicy(p) + + if err != nil { + return nil, errwrap.Wrapf(fmt.Sprintf("Invalid IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + return cloudResourcePolicy, nil +} + +func (u *KmsKeyRingIamUpdater) SetResourceIamPolicy(policy *cloudresourcemanager.Policy) error { + kmsPolicy, err := resourceManagerToKmsPolicy(policy) + + if err != nil { + return errwrap.Wrapf(fmt.Sprintf("Invalid IAM policy for %s: 
{{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + userAgent, err := tpgresource.GenerateUserAgentString(u.d, u.Config.UserAgent) + if err != nil { + return err + } + + _, err = u.Config.NewKmsClient(userAgent).Projects.Locations.KeyRings.SetIamPolicy(u.resourceId, &cloudkms.SetIamPolicyRequest{ + Policy: kmsPolicy, + }).Do() + + if err != nil { + return errwrap.Wrapf(fmt.Sprintf("Error setting IAM policy for %s: {{"{{"}}err{{"}}"}}", u.DescribeResource()), err) + } + + return nil +} + +func (u *KmsKeyRingIamUpdater) GetResourceId() string { + return u.resourceId +} + +func (u *KmsKeyRingIamUpdater) GetMutexKey() string { + return fmt.Sprintf("iam-kms-key-ring-%s", u.resourceId) +} + +func (u *KmsKeyRingIamUpdater) DescribeResource() string { + return fmt.Sprintf("KMS KeyRing %q", u.resourceId) +} + +func resourceManagerToKmsPolicy(p *cloudresourcemanager.Policy) (*cloudkms.Policy, error) { + out := &cloudkms.Policy{} + err := tpgresource.Convert(p, out) + if err != nil { + return nil, errwrap.Wrapf("Cannot convert a v1 policy to a kms policy: {{"{{"}}err{{"}}"}}", err) + } + return out, nil +} + +func kmsToResourceManagerPolicy(p *cloudkms.Policy) (*cloudresourcemanager.Policy, error) { + out := &cloudresourcemanager.Policy{} + err := tpgresource.Convert(p, out) + if err != nil { + return nil, errwrap.Wrapf("Cannot convert a kms policy to a v1 policy: {{"{{"}}err{{"}}"}}", err) + } + return out, nil +} diff --git a/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring_test.go.tmpl b/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring_test.go.tmpl new file mode 100644 index 000000000000..24ea402262c5 --- /dev/null +++ b/mmv1/third_party/terraform/services/kms/go/iam_kms_key_ring_test.go.tmpl @@ -0,0 +1,593 @@ +package kms_test + +import ( + "fmt" + "reflect" + "sort" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + 
"github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/services/kms" +) + +const DEFAULT_KMS_TEST_LOCATION = "us-central1" + +func TestAccKmsKeyRingIamBinding(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyDecrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + // Test Iam Binding creation + Config: testAccKmsKeyRingIamBinding_basic(projectId, orgId, billingAccount, account, keyRingName, roleId), + Check: testAccCheckGoogleKmsKeyRingIam(t, keyRingId.KeyRingId(), roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_key_ring_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", keyRingId.TerraformId(), roleId), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccKmsKeyRingIamBinding_update(projectId, orgId, billingAccount, account, keyRingName, roleId), + Check: testAccCheckGoogleKmsKeyRingIam(t, keyRingId.KeyRingId(), roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + fmt.Sprintf("serviceAccount:%s-2@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_key_ring_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s", 
keyRingId.TerraformId(), roleId), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsKeyRingIamBinding_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyDecrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsKeyRingIamBinding_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle), + }, + { + ResourceName: "google_kms_key_ring_iam_binding.foo", + ImportStateId: fmt.Sprintf("%s %s %s", keyRingId.TerraformId(), roleId, conditionTitle), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func TestAccKmsKeyRingIamMember(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ 
+ { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccKmsKeyRingIamMember_basic(projectId, orgId, billingAccount, account, keyRingName, roleId), + Check: testAccCheckGoogleKmsKeyRingIam(t, keyRingId.KeyRingId(), roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_key_ring_iam_member.foo", + ImportStateId: fmt.Sprintf("%s %s serviceAccount:%s@%s.iam.gserviceaccount.com", keyRingId.TerraformId(), roleId, account, projectId), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsKeyRingIamMember_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsKeyRingIamMember_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle), + }, + { + ResourceName: "google_kms_key_ring_iam_member.foo", + ImportStateId: fmt.Sprintf("%s %s serviceAccount:%s@%s.iam.gserviceaccount.com %s", keyRingId.TerraformId(), roleId, account, projectId, conditionTitle), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func TestAccKmsKeyRingIamPolicy(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := 
fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsKeyRingIamPolicy_basic(projectId, orgId, billingAccount, account, keyRingName, roleId), + Check: testAccCheckGoogleKmsKeyRingIam(t, keyRingId.KeyRingId(), roleId, []string{ + fmt.Sprintf("serviceAccount:%s@%s.iam.gserviceaccount.com", account, projectId), + }), + }, + { + ResourceName: "google_kms_key_ring_iam_policy.foo", + ImportStateId: keyRingId.TerraformId(), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +{{ if ne $.TargetVersionName `ga` -}} +func TestAccKmsKeyRingIamPolicy_withCondition(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + projectId := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + billingAccount := envvar.GetTestBillingAccountFromEnv(t) + account := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + roleId := "roles/cloudkms.cryptoKeyEncrypter" + keyRingName := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + conditionTitle := "expires_after_2019_12_31" + + keyRingId := &kms.KmsKeyRingId{ + Project: projectId, + Location: DEFAULT_KMS_TEST_LOCATION, + Name: keyRingName, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccKmsKeyRingIamPolicy_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle), 
+ }, + { + ResourceName: "google_kms_key_ring_iam_policy.foo", + ImportStateId: keyRingId.TerraformId(), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} +{{- end }} + +func testAccCheckGoogleKmsKeyRingIam(t *testing.T, keyRingId, role string, members []string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + p, err := config.NewKmsClient(config.UserAgent).Projects.Locations.KeyRings.GetIamPolicy(keyRingId).Do() + if err != nil { + return err + } + + for _, binding := range p.Bindings { + if binding.Role == role { + sort.Strings(members) + sort.Strings(binding.Members) + + if reflect.DeepEqual(members, binding.Members) { + return nil + } + + return fmt.Errorf("Binding found but expected members is %v, got %v", members, binding.Members) + } + } + + return fmt.Errorf("No binding for role %q", role) + } +} + +// We are using a custom role since iam_binding is authoritative on the member list and +// we want to avoid removing members from an existing role to prevent unwanted side effects. 
+func testAccKmsKeyRingIamBinding_basic(projectId, orgId, billingAccount, account, keyRingName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_key_ring_iam_binding" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] +} +`, projectId, orgId, billingAccount, account, keyRingName, roleId) +} + +func testAccKmsKeyRingIamBinding_update(projectId, orgId, billingAccount, account, keyRingName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_service_account" "test_account_2" { + project = google_project_service.iam.project + account_id = "%s-2" + display_name = "Kms Key Ring Iam 
Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "%s" + name = "%s" +} + +resource "google_kms_key_ring_iam_binding" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + role = "%s" + members = [ + "serviceAccount:${google_service_account.test_account.email}", + "serviceAccount:${google_service_account.test_account_2.email}", + ] +} +`, projectId, orgId, billingAccount, account, account, DEFAULT_KMS_TEST_LOCATION, keyRingName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsKeyRingIamBinding_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "us-central1" + name = "%s" +} + +resource "google_kms_key_ring_iam_binding" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + role = "%s" + members = ["serviceAccount:${google_service_account.test_account.email}"] + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } +} +`, projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle) +} +{{- end }} + +func testAccKmsKeyRingIamMember_basic(projectId, orgId, billingAccount, account, keyRingName, roleId 
string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "%s" + name = "%s" +} + +resource "google_kms_key_ring_iam_member" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" +} +`, projectId, orgId, billingAccount, account, DEFAULT_KMS_TEST_LOCATION, keyRingName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsKeyRingIamMember_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "%s" + name = "%s" +} + +resource "google_kms_key_ring_iam_member" "foo" { + 
key_ring_id = google_kms_key_ring.key_ring.id + role = "%s" + member = "serviceAccount:${google_service_account.test_account.email}" + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } +} +`, projectId, orgId, billingAccount, account, DEFAULT_KMS_TEST_LOCATION, keyRingName, roleId, conditionTitle) +} +{{- end }} + +func testAccKmsKeyRingIamPolicy_basic(projectId, orgId, billingAccount, account, keyRingName, roleId string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = "%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "%s" + name = "%s" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + + members = ["serviceAccount:${google_service_account.test_account.email}"] + } +} + +resource "google_kms_key_ring_iam_policy" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + policy_data = data.google_iam_policy.foo.policy_data +} +`, projectId, orgId, billingAccount, account, DEFAULT_KMS_TEST_LOCATION, keyRingName, roleId) +} + +{{ if ne $.TargetVersionName `ga` -}} +func testAccKmsKeyRingIamPolicy_withCondition(projectId, orgId, billingAccount, account, keyRingName, roleId, conditionTitle string) string { + return fmt.Sprintf(` +resource "google_project" "test_project" { + name = "Test project" + project_id = "%s" + org_id = "%s" + billing_account = 
"%s" +} + +resource "google_project_service" "kms" { + project = google_project.test_project.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_project_service" "iam" { + project = google_project_service.kms.project + service = "iam.googleapis.com" +} + +resource "google_service_account" "test_account" { + project = google_project_service.iam.project + account_id = "%s" + display_name = "Kms Key Ring Iam Testing Account" +} + +resource "google_kms_key_ring" "key_ring" { + project = google_project_service.iam.project + location = "%s" + name = "%s" +} + +data "google_iam_policy" "foo" { + binding { + role = "%s" + + members = ["serviceAccount:${google_service_account.test_account.email}"] + condition { + title = "%s" + description = "Expiring at midnight of 2019-12-31" + expression = "request.time < timestamp(\"2020-01-01T00:00:00Z\")" + } + } +} + +resource "google_kms_key_ring_iam_policy" "foo" { + key_ring_id = google_kms_key_ring.key_ring.id + policy_data = data.google_iam_policy.foo.policy_data +} +`, projectId, orgId, billingAccount, account, DEFAULT_KMS_TEST_LOCATION, keyRingName, roleId, conditionTitle) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/logging/resource_logging_bucket_config.go b/mmv1/third_party/terraform/services/logging/resource_logging_bucket_config.go index 78258975e396..6aa57eb95346 100644 --- a/mmv1/third_party/terraform/services/logging/resource_logging_bucket_config.go +++ b/mmv1/third_party/terraform/services/logging/resource_logging_bucket_config.go @@ -93,6 +93,7 @@ See [Enabling CMEK for Logging Buckets](https://cloud.google.com/logging/docs/ro Type: schema.TypeSet, MaxItems: 20, Optional: true, + Computed: true, Description: `A list of indexed fields and related configuration data.`, Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ diff --git a/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_cluster_test.go.tmpl 
b/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_cluster_test.go.tmpl new file mode 100644 index 000000000000..3b4edebc8d56 --- /dev/null +++ b/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_cluster_test.go.tmpl @@ -0,0 +1,110 @@ +package managedkafka_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccManagedKafkaCluster_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckManagedKafkaClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccManagedKafkaCluster_basic(context), + }, + { + ResourceName: "google_managed_kafka_cluster.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, + }, + { + Config: testAccManagedKafkaCluster_update(context), + }, + { + ResourceName: "google_managed_kafka_cluster.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster_id", "labels", "location", "terraform_labels"}, + }, + }, + }) +} + +func testAccManagedKafkaCluster_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + rebalance_config { + mode = 
"NO_REBALANCE" + } + labels = { + key = "value" + } + + provider = google-beta +} + +data "google_project" "project" { + provider = google-beta +} +`, context) +} + +func testAccManagedKafkaCluster_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 4 + memory_bytes = 4512135122 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + rebalance_config { + mode = "AUTO_REBALANCE_ON_SCALE_UP" + } + labels = { + key = "new-value" + } + + provider = google-beta +} + +data "google_project" "project" { + provider = google-beta +} +`, context) +} +{{- else }} +// Magic Modules doesn't let us remove files - blank out beta-only common-compile files for now. +{{- end }} diff --git a/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_topic_test.go.tmpl b/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_topic_test.go.tmpl new file mode 100644 index 000000000000..b739ada2c4d6 --- /dev/null +++ b/mmv1/third_party/terraform/services/managedkafka/go/resource_managed_kafka_topic_test.go.tmpl @@ -0,0 +1,124 @@ +package managedkafka_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccManagedKafkaTopic_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckManagedKafkaTopicDestroyProducer(t), + Steps: 
[]resource.TestStep{ + { + Config: testAccManagedKafkaTopic_basic(context), + }, + { + ResourceName: "google_managed_kafka_topic.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "location", "topic_id"}, + }, + { + Config: testAccManagedKafkaTopic_update(context), + }, + { + ResourceName: "google_managed_kafka_topic.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"cluster", "location", "topic_id"}, + }, + }, + }) +} + +func testAccManagedKafkaTopic_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + + provider = google-beta +} + +resource "google_managed_kafka_topic" "example" { + cluster = google_managed_kafka_cluster.example.cluster_id + topic_id = "tf-test-my-topic%{random_suffix}" + location = "us-central1" + partition_count = 2 + replication_factor = 3 + configs = { + "cleanup.policy" = "compact" + } + + provider = google-beta +} + +data "google_project" "project" { + provider = google-beta +} +`, context) +} + +func testAccManagedKafkaTopic_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_managed_kafka_cluster" "example" { + cluster_id = "tf-test-my-cluster%{random_suffix}" + location = "us-central1" + capacity_config { + vcpu_count = 3 + memory_bytes = 3221225472 + } + gcp_config { + access_config { + network_configs { + subnet = "projects/${data.google_project.project.number}/regions/us-central1/subnetworks/default" + } + } + } + + provider = google-beta +} + +resource "google_managed_kafka_topic" "example" { + cluster = 
google_managed_kafka_cluster.example.cluster_id + topic_id = "tf-test-my-topic%{random_suffix}" + location = "us-central1" + partition_count = 3 + replication_factor = 3 + configs = { + "cleanup.policy" = "compact" + } + + provider = google-beta +} + +data "google_project" "project" { + provider = google-beta +} +`, context) +} +{{- else }} +// Magic Modules doesn't let us remove files - blank out beta-only common-compile files for now. +{{- end }} diff --git a/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_hub_sweeper.go.tmpl b/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_hub_sweeper.go.tmpl new file mode 100644 index 000000000000..2477809b4a93 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_hub_sweeper.go.tmpl @@ -0,0 +1,126 @@ +package networkconnectivity + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("NetworkConnectivityHub", testSweepNetworkConnectivityHub) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepNetworkConnectivityHub(region string) error { + resourceName := "NetworkConnectivityHub" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := 
&testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://networkconnectivity.googleapis.com/v1/projects/{{"{{"}}project{{"}}"}}/locations/global/hubs", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["hubs"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://networkconnectivity.googleapis.com/v1/projects/{{"{{"}}project{{"}}"}}/locations/global/hubs/{{"{{"}}name{{"}}"}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_spoke_sweeper.go.tmpl b/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_spoke_sweeper.go.tmpl new file mode 100644 index 000000000000..2ab46c9eef8d --- /dev/null +++ b/mmv1/third_party/terraform/services/networkconnectivity/go/resource_network_connectivity_spoke_sweeper.go.tmpl @@ -0,0 +1,126 @@ +package networkconnectivity + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "context" + "log" + "strings" + "testing" + + 
"github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("NetworkConnectivitySpoke", testSweepNetworkConnectivitySpoke) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepNetworkConnectivitySpoke(region string) error { + resourceName := "NetworkConnectivitySpoke" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": region, + "location": region, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://networkconnectivity.googleapis.com/v1/projects/{{"{{"}}project{{"}}"}}/locations/global/spokes", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + return nil + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + return nil + } + + resourceList, ok := res["spokes"] + if !ok 
{ + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + return nil + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + return nil + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://networkconnectivity.googleapis.com/v1/projects/{{"{{"}}project{{"}}"}}/locations/global/spokes/{{"{{"}}name{{"}}"}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + return nil + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + + return nil +} + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_authorization_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_authorization_policy_test.go.tmpl new file mode 100644 index 000000000000..808c5ba38b57 --- 
/dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_authorization_policy_test.go.tmpl @@ -0,0 +1,83 @@ +package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNetworkSecurityAuthorizationPolicy_update(t *testing.T) { + t.Parallel() + + authorizationPolicyName := fmt.Sprintf("tf-test-authorization-policy-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityAuthorizationPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityAuthorizationPolicy_basic(authorizationPolicyName), + }, + { + ResourceName: "google_network_security_authorization_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityAuthorizationPolicy_update(authorizationPolicyName), + }, + { + ResourceName: "google_network_security_authorization_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkSecurityAuthorizationPolicy_basic(authorizationPolicyName string) string { + return fmt.Sprintf(` + resource "google_network_security_authorization_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + action = "ALLOW" + rules { + sources { + principals = ["namespace/*"] + ip_blocks = ["1.2.3.0/24"] + } + } + } +`, authorizationPolicyName) +} + +func testAccNetworkSecurityAuthorizationPolicy_update(authorizationPolicyName string) string { + return fmt.Sprintf(` + resource 
"google_network_security_authorization_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "updated description" + action = "DENY" + rules { + sources { + principals = ["namespace1/*"] + ip_blocks = ["1.2.3.0/24"] + } + } + } +`, authorizationPolicyName) +} + +{{ end }} + diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_client_tls_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_client_tls_policy_test.go.tmpl new file mode 100644 index 000000000000..0cda14504a41 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_client_tls_policy_test.go.tmpl @@ -0,0 +1,96 @@ +package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNetworkSecurityClientTlsPolicy_update(t *testing.T) { + t.Parallel() + + clientTlsPolicyName := fmt.Sprintf("tf-test-client-tls-policy-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityClientTlsPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityClientTlsPolicy_basic(clientTlsPolicyName), + }, + { + ResourceName: "google_network_security_client_tls_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityClientTlsPolicy_update(clientTlsPolicyName), + }, + { + ResourceName: "google_network_security_client_tls_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + 
+func testAccNetworkSecurityClientTlsPolicy_basic(clientTlsPolicyName string) string { + return fmt.Sprintf(` + resource "google_network_security_client_tls_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + sni = "secure.example.com" + client_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } + server_validation_ca { + grpc_endpoint { + target_uri = "unix:mypath" + } + } + } +`, clientTlsPolicyName) +} + +func testAccNetworkSecurityClientTlsPolicy_update(clientTlsPolicyName string) string { + return fmt.Sprintf(` + resource "google_network_security_client_tls_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "updated description" + sni = "secure1.example.com" + client_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud" + } + } + server_validation_ca { + grpc_endpoint { + target_uri = "unix:mypath1" + } + } + server_validation_ca { + grpc_endpoint { + target_uri = "unix:mypath2" + } + } + } +`, clientTlsPolicyName) +} + +{{ end }} + diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_association_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_association_test.go.tmpl new file mode 100644 index 000000000000..0cc684a4ce64 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_association_test.go.tmpl @@ -0,0 +1,214 @@ +package networksecurity_test + +import ( + "fmt" + "strings" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + transport_tpg 
"github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestAccNetworkSecurityFirewallEndpointAssociations_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "orgId": envvar.GetTestOrgFromEnv(t), + "randomSuffix": acctest.RandString(t, 10), + "billingProjectId": envvar.GetTestProjectFromEnv(), + "disabled": strconv.FormatBool(false), + } + + testResourceName := "google_network_security_firewall_endpoint_association.foobar" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityFirewallEndpointDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityFirewallEndpointAssociation_basic(context), + }, + { + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityFirewallEndpointAssociation_update(context), + }, + { + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccNetworkSecurityFirewallEndpointAssociations_disabled(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "orgId": envvar.GetTestOrgFromEnv(t), + "randomSuffix": acctest.RandString(t, 10), + "billingProjectId": envvar.GetTestProjectFromEnv(), + } + + testResourceName := "google_network_security_firewall_endpoint_association.foobar" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityFirewallEndpointDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccNetworkSecurityFirewallEndpointAssociation_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(testResourceName, "disabled", "false"), + ), + }, + { + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityFirewallEndpointAssociation_update(testContextMapDisabledField(context, true)), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(testResourceName, "disabled", "true"), + ), + }, + { + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityFirewallEndpointAssociation_update(testContextMapDisabledField(context, false)), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(testResourceName, "disabled", "false"), + ), + }, + { + ResourceName: testResourceName, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testContextMapDisabledField(context map[string]interface{}, disabled bool) map[string]interface{} { + context["disabled"] = strconv.FormatBool(disabled) + return context +} + +func testAccNetworkSecurityFirewallEndpointAssociation_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "foobar" { + name = "tf-test-my-vpc%{randomSuffix}" + auto_create_subnetworks = false +} + +resource "google_network_security_firewall_endpoint" "foobar" { + name = "tf-test-my-firewall-endpoint%{randomSuffix}" + parent = "organizations/%{orgId}" + location = "us-central1-a" + billing_project_id = "%{billingProjectId}" +} + +# TODO: add tlsInspectionPolicy once resource is ready +resource "google_network_security_firewall_endpoint_association" "foobar" { + name = 
"tf-test-my-firewall-endpoint-association%{randomSuffix}" + parent = "projects/%{billingProjectId}" + location = "us-central1-a" + firewall_endpoint = google_network_security_firewall_endpoint.foobar.id + network = google_compute_network.foobar.id + + labels = { + foo = "bar" + } +} +`, context) +} + +func testAccNetworkSecurityFirewallEndpointAssociation_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "foobar" { + name = "tf-test-my-vpc%{randomSuffix}" + auto_create_subnetworks = false +} + +resource "google_network_security_firewall_endpoint" "foobar" { + name = "tf-test-my-firewall-endpoint%{randomSuffix}" + parent = "organizations/%{orgId}" + location = "us-central1-a" + billing_project_id = "%{billingProjectId}" +} + +# TODO: add tlsInspectionPolicy once resource is ready +resource "google_network_security_firewall_endpoint_association" "foobar" { + name = "tf-test-my-firewall-endpoint-association%{randomSuffix}" + parent = "projects/%{billingProjectId}" + location = "us-central1-a" + firewall_endpoint = google_network_security_firewall_endpoint.foobar.id + network = google_compute_network.foobar.id + disabled = "%{disabled}" + + labels = { + foo = "bar-updated" + } +} +`, context) +} + +func testAccCheckNetworkSecurityFirewallEndpointAssociationDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_network_security_firewall_endpoint_association" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetworkSecurityBasePath{{"}}"}}{{"{{"}}parent{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/firewallEndpointAssociations/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + 
billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("NetworkSecurityFirewallEndpointAssociation still exists at %s", url) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_test.go.tmpl new file mode 100644 index 000000000000..e50b644a141a --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_firewall_endpoint_test.go.tmpl @@ -0,0 +1,119 @@ +package networksecurity_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" +) + +func TestAccNetworkSecurityFirewallEndpoints_basic(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + billingProjectId := envvar.GetTestProjectFromEnv() + orgId := envvar.GetTestOrgFromEnv(t) + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityFirewallEndpointDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityFirewallEndpoints_basic(orgId, billingProjectId, randomSuffix), + }, + { + ResourceName: "google_network_security_firewall_endpoint.foobar", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityFirewallEndpoints_update(orgId, billingProjectId, randomSuffix), + }, + { + ResourceName: "google_network_security_firewall_endpoint.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkSecurityFirewallEndpoints_basic(orgId string, billingProjectId string, randomSuffix string) string { + return fmt.Sprintf(` +resource "google_network_security_firewall_endpoint" "foobar" { + name = "tf-test-my-firewall-endpoint%[1]s" + parent = "organizations/%[2]s" + location = "us-central1-a" + billing_project_id = "%[3]s" + + labels = { + foo = "bar" + } +} +`, randomSuffix, orgId, billingProjectId) +} + +func testAccNetworkSecurityFirewallEndpoints_update(orgId string, billingProjectId string, randomSuffix string) string { + return fmt.Sprintf(` +resource "google_network_security_firewall_endpoint" "foobar" { + name = "tf-test-my-firewall-endpoint%[1]s" + parent = "organizations/%[2]s" + location = "us-central1-a" + billing_project_id = "%[3]s" + + labels = { + foo = "bar-updated" + } +} +`, randomSuffix, orgId, billingProjectId) +} + +func testAccCheckNetworkSecurityFirewallEndpointDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_network_security_firewall_endpoint" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}NetworkSecurityBasePath{{"}}"}}{{"{{"}}parent{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/firewallEndpoints/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = 
config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("NetworkSecurityFirewallEndpoint still exists at %s", url) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_group_test.go b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_group_test.go new file mode 100644 index 000000000000..f7eddcee9826 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_group_test.go @@ -0,0 +1,96 @@ +package networksecurity_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccNetworkSecuritySecurityProfileGroups_update(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecuritySecurityProfileGroupDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecuritySecurityProfileGroups_basic(orgId, randomSuffix), + }, + { + ResourceName: "google_network_security_security_profile_group.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecuritySecurityProfileGroups_update(orgId, randomSuffix), + }, + { + ResourceName: "google_network_security_security_profile_group.foobar", + ImportState: true, + ImportStateVerify: true, + 
ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkSecuritySecurityProfileGroups_basic(orgId string, randomSuffix string) string { + return fmt.Sprintf(` +resource "google_network_security_security_profile" "foobar" { + name = "tf-test-my-security-profile%s" + type = "THREAT_PREVENTION" + parent = "organizations/%s" + location = "global" +} + +resource "google_network_security_security_profile_group" "foobar" { + name = "tf-test-my-security-profile-group%s" + parent = "organizations/%s" + location = "global" + description = "My security profile group." + threat_prevention_profile = google_network_security_security_profile.foobar.id + + labels = { + foo = "bar" + } +} +`, randomSuffix, orgId, randomSuffix, orgId) +} + +func testAccNetworkSecuritySecurityProfileGroups_update(orgId string, randomSuffix string) string { + return fmt.Sprintf(` +resource "google_network_security_security_profile" "foobar" { + name = "tf-test-my-security-profile%s" + type = "THREAT_PREVENTION" + parent = "organizations/%s" + location = "global" +} + +resource "google_network_security_security_profile" "foobar_updated" { + name = "tf-test-my-security-profile-updated%s" + type = "THREAT_PREVENTION" + parent = "organizations/%s" + location = "global" +} + +resource "google_network_security_security_profile_group" "foobar" { + name = "tf-test-my-security-profile-group%s" + parent = "organizations/%s" + location = "global" + description = "My security profile group. 
Update" + threat_prevention_profile = google_network_security_security_profile.foobar_updated.id + + labels = { + foo = "foo" + } +} +`, randomSuffix, orgId, randomSuffix, orgId, randomSuffix, orgId) +} diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_test.go b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_test.go new file mode 100644 index 000000000000..70afec07b0e6 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_security_profile_test.go @@ -0,0 +1,87 @@ +package networksecurity_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccNetworkSecuritySecurityProfiles_update(t *testing.T) { + t.Parallel() + + orgId := envvar.GetTestOrgFromEnv(t) + randomSuffix := acctest.RandString(t, 10) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecuritySecurityProfileDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecuritySecurityProfiles_basic(orgId, randomSuffix), + }, + { + ResourceName: "google_network_security_security_profile.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecuritySecurityProfiles_update(orgId, randomSuffix), + }, + { + ResourceName: "google_network_security_security_profile.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkSecuritySecurityProfiles_basic(orgId string, randomSuffix string) string { + return 
fmt.Sprintf(` +resource "google_network_security_security_profile" "foobar" { + name = "tf-test-my-security-profile%s" + parent = "organizations/%s" + location = "global" + description = "My security profile." + type = "THREAT_PREVENTION" + + labels = { + foo = "bar" + } +} +`, randomSuffix, orgId) +} + +func testAccNetworkSecuritySecurityProfiles_update(orgId string, randomSuffix string) string { + return fmt.Sprintf(` +resource "google_network_security_security_profile" "foobar" { + name = "tf-test-my-security-profile%s" + parent = "organizations/%s" + location = "global" + description = "My security profile. Update" + type = "THREAT_PREVENTION" + + labels = { + foo = "foo" + } + + threat_prevention_profile { + severity_overrides { + action = "ALLOW" + severity = "INFORMATIONAL" + } + + severity_overrides { + action = "DENY" + severity = "HIGH" + } + } +} +`, randomSuffix, orgId) +} diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_server_tls_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_server_tls_policy_test.go.tmpl new file mode 100644 index 000000000000..838d84ccd3b9 --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_server_tls_policy_test.go.tmpl @@ -0,0 +1,81 @@ +package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNetworkSecurityServerTlsPolicy_update(t *testing.T) { + t.Parallel() + + serverTlsPolicyName := fmt.Sprintf("tf-test-server-tls-policy-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkSecurityServerTlsPolicyDestroyProducer(t), + 
Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityServerTlsPolicy_basic(serverTlsPolicyName), + }, + { + ResourceName: "google_network_security_server_tls_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkSecurityServerTlsPolicy_update(serverTlsPolicyName), + }, + { + ResourceName: "google_network_security_server_tls_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkSecurityServerTlsPolicy_basic(serverTlsPolicyName string) string { + return fmt.Sprintf(` + resource "google_network_security_server_tls_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + allow_open = "false" + server_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } + } +`, serverTlsPolicyName) +} + +func testAccNetworkSecurityServerTlsPolicy_update(serverTlsPolicyName string) string { + return fmt.Sprintf(` + resource "google_network_security_server_tls_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "updated description" + allow_open = "false" + server_certificate { + certificate_provider_instance { + plugin_instance = "google_cloud_private_spiffe" + } + } + } +`, serverTlsPolicyName) +} + +{{ end }} + diff --git a/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_tls_inspection_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_tls_inspection_policy_test.go.tmpl new file mode 100644 index 000000000000..51820210982d --- /dev/null +++ b/mmv1/third_party/terraform/services/networksecurity/go/resource_network_security_tls_inspection_policy_test.go.tmpl @@ -0,0 +1,380 @@ +package networksecurity_test +{{- if ne $.TargetVersionName "ga" }} + 
+import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccNetworkSecurityTlsInspectionPolicy_update(t *testing.T){ + t.Parallel() + + context := map[string]interface{}{ + "projectNumber": envvar.GetTestProjectNumberFromEnv(), + "randomSuffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckNetworkSecurityTlsInspectionPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkSecurityTlsInspectionPolicy_basic(context), + }, + { + ResourceName: "google_network_security_tls_inspection_policy.default", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccNetworkSecurityTlsInspectionPolicy_update(context), + }, + { + ResourceName: "google_network_security_tls_inspection_policy.default", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccNetworkSecurityTlsInspectionPolicy_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privateca_ca_pool" "default" { + provider = google-beta + name = "tf-test-cap-%{randomSuffix}" + location = "us-central1" + tier = "DEVOPS" + + publishing_options { + publish_ca_cert = false + publish_crl = false + } + + issuance_policy { + maximum_lifetime = "1209600s" + baseline_values { + ca_options { + is_ca = false + } + key_usage { + base_key_usage {} + extended_key_usage { + server_auth = true + } + } + } + } +} + +resource "google_privateca_certificate_authority" "default" { + provider = google-beta + pool = google_privateca_ca_pool.default.name + certificate_authority_id = "tf-test-ca-%{randomSuffix}" + location = "us-central1" + lifetime = "86400s" + type = "SELF_SIGNED" + 
deletion_protection = false + skip_grace_period = true + ignore_active_certificates_on_deletion = true + + config { + subject_config { + subject { + organization = "Test LLC" + common_name = "my-ca" + } + } + x509_config { + ca_options { + is_ca = true + } + key_usage { + base_key_usage { + cert_sign = true + crl_sign = true + } + extended_key_usage { + server_auth = false + } + } + } + } + + key_spec { + algorithm = "RSA_PKCS1_4096_SHA256" + } +} + +resource "google_project_service_identity" "default" { + provider = google-beta + service = "networksecurity.googleapis.com" +} + +resource "google_privateca_ca_pool_iam_member" "default" { + provider = google-beta + ca_pool = google_privateca_ca_pool.default.id + role = "roles/privateca.certificateManager" + member = "serviceAccount:${google_project_service_identity.default.email}" +} + +resource "google_certificate_manager_trust_config" "default" { + provider = google-beta + name = "tf-test-tc-%{randomSuffix}" + description = "sample trust config description" + location = "us-central1" + + trust_stores { + trust_anchors { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + intermediate_cas { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + } +} + +resource "google_network_security_tls_inspection_policy" "default" { + provider = google-beta + name = "tf-test-tip-%{randomSuffix}" + location = "us-central1" + ca_pool = google_privateca_ca_pool.default.id + exclude_public_ca_set = false + min_tls_version = "TLS_1_0" + trust_config = google_certificate_manager_trust_config.default.id + tls_feature_profile = "PROFILE_CUSTOM" + + custom_tls_features = [ + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", + 
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + "TLS_RSA_WITH_AES_128_CBC_SHA", + "TLS_RSA_WITH_AES_128_GCM_SHA256", + "TLS_RSA_WITH_AES_256_CBC_SHA", + "TLS_RSA_WITH_AES_256_GCM_SHA384" + ] + + depends_on = [ + google_privateca_certificate_authority.default, + google_privateca_ca_pool_iam_member.default + ] +} +`, context) +} + +func testAccNetworkSecurityTlsInspectionPolicy_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privateca_ca_pool" "default" { + provider = google-beta + name = "tf-test-cap-%{randomSuffix}" + location = "us-central1" + tier = "DEVOPS" + + publishing_options { + publish_ca_cert = false + publish_crl = false + } + + issuance_policy { + maximum_lifetime = "1209600s" + baseline_values { + ca_options { + is_ca = false + } + key_usage { + base_key_usage {} + extended_key_usage { + server_auth = true + } + } + } + } +} + +resource "google_privateca_ca_pool" "default_updated" { + provider = google-beta + name = "tf-test-cap-updated-%{randomSuffix}" + location = "us-central1" + tier = "DEVOPS" + + publishing_options { + publish_ca_cert = false + publish_crl = false + } + + issuance_policy { + maximum_lifetime = "1209600s" + baseline_values { + ca_options { + is_ca = false + } + key_usage { + base_key_usage {} + extended_key_usage { + server_auth = true + } + } + } + } +} + +resource "google_privateca_certificate_authority" "default" { + provider = google-beta + pool = google_privateca_ca_pool.default.name + certificate_authority_id = "tf-test-ca-%{randomSuffix}" + location = "us-central1" + lifetime = "86400s" + type = "SELF_SIGNED" + deletion_protection = false + skip_grace_period = true + ignore_active_certificates_on_deletion = true + + config { + subject_config { + subject { + organization = "Test LLC" + common_name = "my-ca" + } + } + x509_config { + ca_options { + is_ca = true + } + key_usage { + base_key_usage { + 
cert_sign = true + crl_sign = true + } + extended_key_usage { + server_auth = false + } + } + } + } + + key_spec { + algorithm = "RSA_PKCS1_4096_SHA256" + } +} + +resource "google_privateca_certificate_authority" "default_updated" { + provider = google-beta + pool = google_privateca_ca_pool.default_updated.name + certificate_authority_id = "tf-test-ca-%{randomSuffix}" + location = "us-central1" + lifetime = "86400s" + type = "SELF_SIGNED" + deletion_protection = false + skip_grace_period = true + ignore_active_certificates_on_deletion = true + + config { + subject_config { + subject { + organization = "Test LLC" + common_name = "my-ca" + } + } + x509_config { + ca_options { + is_ca = true + } + key_usage { + base_key_usage { + cert_sign = true + crl_sign = true + } + extended_key_usage { + server_auth = false + } + } + } + } + + key_spec { + algorithm = "RSA_PKCS1_4096_SHA256" + } +} + +resource "google_project_service_identity" "default" { + provider = google-beta + service = "networksecurity.googleapis.com" +} + +resource "google_privateca_ca_pool_iam_member" "default" { + provider = google-beta + ca_pool = google_privateca_ca_pool.default.id + role = "roles/privateca.certificateManager" + member = "serviceAccount:${google_project_service_identity.default.email}" +} + +resource "google_privateca_ca_pool_iam_member" "default_updated" { + provider = google-beta + ca_pool = google_privateca_ca_pool.default_updated.id + role = "roles/privateca.certificateManager" + member = "serviceAccount:${google_project_service_identity.default.email}" +} + +resource "google_certificate_manager_trust_config" "default" { + provider = google-beta + name = "tf-test-tc-%{randomSuffix}" + description = "sample trust config description" + location = "us-central1" + + trust_stores { + trust_anchors { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + intermediate_cas { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + } +} + +resource 
"google_certificate_manager_trust_config" "default_updated" { + provider = google-beta + name = "tf-test-tc-updated-%{randomSuffix}" + description = "another sample trust config description" + location = "us-central1" + + trust_stores { + trust_anchors { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + intermediate_cas { + pem_certificate = file("test-fixtures/ca_cert.pem") + } + } +} + +resource "google_network_security_tls_inspection_policy" "default" { + provider = google-beta + name = "tf-test-tip-%{randomSuffix}" + location = "us-central1" + description = "my tls inspection policy updated" + ca_pool = google_privateca_ca_pool.default_updated.id + exclude_public_ca_set = true + min_tls_version = "TLS_1_2" + trust_config = google_certificate_manager_trust_config.default_updated.id + + depends_on = [ + google_privateca_certificate_authority.default_updated, + google_privateca_ca_pool_iam_member.default_updated + ] +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_endpoint_policy_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_endpoint_policy_test.go.tmpl new file mode 100644 index 000000000000..d3aa003ac92b --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_endpoint_policy_test.go.tmpl @@ -0,0 +1,92 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesEndpointPolicy_update(t *testing.T) { + t.Parallel() + + endpointPolicyName := fmt.Sprintf("tf-test-endpoint-policy-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckNetworkServicesEndpointPolicyDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesEndpointPolicy_basic(endpointPolicyName), + }, + { + ResourceName: "google_network_services_endpoint_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkServicesEndpointPolicy_update(endpointPolicyName), + }, + { + ResourceName: "google_network_services_endpoint_policy.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesEndpointPolicy_basic(endpointPolicyName string) string { + return fmt.Sprintf(` +resource "google_network_services_endpoint_policy" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + type = "SIDECAR_PROXY" + traffic_port_selector { + ports = ["8081"] + } + endpoint_matcher { + metadata_label_matcher { + metadata_label_match_criteria = "MATCH_ANY" + metadata_labels { + label_name = "foo" + label_value = "bar" + } + } + } +} +`, endpointPolicyName) +} + +func testAccNetworkServicesEndpointPolicy_update(endpointPolicyName string) string { + return fmt.Sprintf(` +resource "google_network_services_endpoint_policy" "foobar" { + name = "%s" + labels = { + foo = "barbar" + baz = "qux" + } + description = "update description" + type = "GRPC_SERVER" + endpoint_matcher { + metadata_label_matcher { + metadata_label_match_criteria = "MATCH_ALL" + metadata_labels { + label_name = "baz" + label_value = "bux" + } + } + } +} +`, endpointPolicyName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_grpc_route_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_grpc_route_test.go.tmpl new file mode 100644 index 000000000000..f37749eb11e8 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_grpc_route_test.go.tmpl @@ -0,0 +1,133 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNetworkServicesGrpcRoute_update(t *testing.T) { + t.Parallel() + + grpcRouteName := fmt.Sprintf("tf-test-grpc-route-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesGrpcRouteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesGrpcRoute_basic(grpcRouteName), + }, + { + ResourceName: "google_network_services_grpc_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkServicesGrpcRoute_update(grpcRouteName), + }, + { + ResourceName: "google_network_services_grpc_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesGrpcRoute_basic(grpcRouteName string) string { + return fmt.Sprintf(` + resource "google_network_services_grpc_route" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + hostnames = ["example"] + rules { + matches { + headers { + key = "key" + value = "value" + } + } + action { + retry_policy { + retry_conditions = ["cancelled"] + num_retries = 1 + } + } + } + rules { + matches { + headers { + key = "key" + value = "value" + } + } + action { + fault_injection_policy { + delay { + fixed_delay = "1s" + percentage = 1 + } + abort { + http_status = 500 + percentage = 1 + } + } + } + } + } +`, grpcRouteName) +} + 
+func testAccNetworkServicesGrpcRoute_update(grpcRouteName string) string { + return fmt.Sprintf(` + resource "google_network_services_grpc_route" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "updated description" + hostnames = ["example"] + rules { + matches { + headers { + key = "key" + value = "value" + } + } + action { + retry_policy { + retry_conditions = ["cancelled"] + num_retries = 2 + } + } + } + rules { + matches { + headers { + key = "key1" + value = "value1" + } + } + action { + retry_policy { + retry_conditions = ["connect-failure"] + num_retries = 1 + } + } + } + } +`, grpcRouteName) +} + +{{ end }} + diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_http_route_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_http_route_test.go.tmpl new file mode 100644 index 000000000000..678d10758e8f --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_http_route_test.go.tmpl @@ -0,0 +1,85 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesHttpRoute_update(t *testing.T) { + t.Parallel() + + httpRouteName := fmt.Sprintf("tf-test-http-route-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesHttpRouteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesHttpRoute_basic(httpRouteName), + }, + { + ResourceName: "google_network_services_http_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + 
Config: testAccNetworkServicesHttpRoute_update(httpRouteName), + }, + { + ResourceName: "google_network_services_http_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesHttpRoute_basic(httpRouteName string) string { + return fmt.Sprintf(` +resource "google_network_services_http_route" "foobar" { + name = "%s" + description = "my description" + hostnames = ["example"] + rules { + matches { + query_parameters { + query_parameter = "key" + exact_match = "value" + } + full_path_match = "example" + } + } +} +`, httpRouteName) +} + +func testAccNetworkServicesHttpRoute_update(httpRouteName string) string { + return fmt.Sprintf(` +resource "google_network_services_http_route" "foobar" { + name = "%s" + description = "update description" + labels = { + foo = "bar" + } + hostnames = ["example"] + rules { + matches { + query_parameters { + query_parameter = "key" + exact_match = "value" + } + full_path_match = "example" + } + } +} +`, httpRouteName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_mesh_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_mesh_test.go.tmpl new file mode 100644 index 000000000000..8db8475759c6 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_mesh_test.go.tmpl @@ -0,0 +1,65 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesMesh_update(t *testing.T) { + t.Parallel() + + meshName := fmt.Sprintf("tf-test-mesh-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + 
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesMeshDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesMesh_basic(meshName), + }, + { + ResourceName: "google_network_services_mesh.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkServicesMesh_update(meshName), + }, + { + ResourceName: "google_network_services_mesh.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesMesh_basic(meshName string) string { + return fmt.Sprintf(` +resource "google_network_services_mesh" "foobar" { + name = "%s" + description = "my description" +} +`, meshName) +} + +func testAccNetworkServicesMesh_update(meshName string) string { + return fmt.Sprintf(` +resource "google_network_services_mesh" "foobar" { + name = "%s" + description = "update description" + labels = { + foo = "bar" + } +} +`, meshName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_binding_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_binding_test.go.tmpl new file mode 100644 index 000000000000..b9ad131be383 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_binding_test.go.tmpl @@ -0,0 +1,60 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesServiceBinding_update(t *testing.T) { + t.Parallel() + + serviceNamespace := fmt.Sprintf("tf-test-service-namespace-%s", acctest.RandString(t, 10)) + serviceName := 
fmt.Sprintf("tf-test-service-%s", acctest.RandString(t, 10)) + serviceBindingName := fmt.Sprintf("tf-test-service-binding-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesServiceBindingDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesServiceBinding_create(serviceNamespace, serviceName, serviceBindingName), + }, + { + ResourceName: "google_network_services_service_binding.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccNetworkServicesServiceBinding_create(serviceNamespace string, serviceName string, serviceBindingName string) string { + return fmt.Sprintf(` + + resource "google_service_directory_namespace" "foo" { + namespace_id = "%s" + location = "us-central1" + } + resource "google_service_directory_service" "bar" { + service_id = "%s" + namespace = google_service_directory_namespace.foo.id + + metadata = { + stage = "prod" + region = "us-central1" + } + } + resource "google_network_services_service_binding" "foobar" { + name = "%s" + description = "my description" + service = google_service_directory_service.bar.id + } +`, serviceNamespace, serviceName, serviceBindingName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_lb_policies_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_lb_policies_test.go.tmpl new file mode 100644 index 000000000000..e592e521843f --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_service_lb_policies_test.go.tmpl @@ -0,0 +1,78 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesLBPolicies_update(t *testing.T) { + t.Parallel() + + policyName := fmt.Sprintf("tf-test-lb-policy-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesServiceLbPoliciesDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesLBPolicies_basic(policyName), + }, + { + ResourceName: "google_network_services_service_lb_policies.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkServicesLBPolicies_update(policyName), + }, + { + ResourceName: "google_network_services_service_lb_policies.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesLBPolicies_basic(policyName string) string { + return fmt.Sprintf(` +resource "google_network_services_service_lb_policies" "foobar" { + name = "%s" + location = "global" + description = "my description" +} +`, policyName) +} + +func testAccNetworkServicesLBPolicies_update(policyName string) string { + return fmt.Sprintf(` +resource "google_network_services_service_lb_policies" "foobar" { + name = "%s" + location = "global" + description = "my description" + load_balancing_algorithm = "SPRAY_TO_REGION" + + auto_capacity_drain { + enable = true + } + + failover_config { + failover_health_threshold = 70 + } + + labels = { + foo = "bar" + } +} +`, policyName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tcp_route_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tcp_route_test.go.tmpl new file mode 100644 index 
000000000000..672cec100e97 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tcp_route_test.go.tmpl @@ -0,0 +1,120 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesTcpRoute_update(t *testing.T) { + t.Parallel() + + tcpServiceName := fmt.Sprintf("tf-test-tcp-service-%s", acctest.RandString(t, 10)) + tcpHealthCheckName := fmt.Sprintf("tf-test-tcp-healthcheck-%s", acctest.RandString(t, 10)) + tcpRouteName := fmt.Sprintf("tf-test-tcp-route-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesTcpRouteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesTcpRoute_basic(tcpServiceName, tcpHealthCheckName, tcpRouteName), + }, + { + ResourceName: "google_network_services_tcp_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNetworkServicesTcpRoute_update(tcpServiceName, tcpHealthCheckName, tcpRouteName), + }, + { + ResourceName: "google_network_services_tcp_route.foobar", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccNetworkServicesTcpRoute_basic(tcpServiceName string, tcpHealthCheckName string, tcpRouteName string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foo" { + name = "%s" + health_checks = [google_compute_http_health_check.bar.id] +} + +resource "google_compute_http_health_check" "bar" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + 
timeout_sec = 1 +} + +resource "google_network_services_tcp_route" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "my description" + rules { + matches { + address = "10.0.0.1/32" + port = "8081" + } + action { + destinations { + service_name = google_compute_backend_service.foo.id + weight = 1 + } + original_destination = false + } + } +} +`, tcpServiceName, tcpHealthCheckName, tcpRouteName) +} + +func testAccNetworkServicesTcpRoute_update(tcpServiceName string, tcpHealthCheckName string, tcpRouteName string) string { + return fmt.Sprintf(` + resource "google_compute_backend_service" "foo" { + name = "%s" + health_checks = [google_compute_http_health_check.bar.id] +} + +resource "google_compute_http_health_check" "bar" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_network_services_tcp_route" "foobar" { + name = "%s" + labels = { + foo = "bar" + } + description = "update description" + rules { + matches { + address = "10.0.0.1/32" + port = "8081" + } + action { + destinations { + service_name = google_compute_backend_service.foo.id + weight = 1 + } + original_destination = false + } + } +} +`, tcpServiceName, tcpHealthCheckName, tcpRouteName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tls_route_test.go.tmpl b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tls_route_test.go.tmpl new file mode 100644 index 000000000000..11896ecb4b40 --- /dev/null +++ b/mmv1/third_party/terraform/services/networkservices/go/resource_network_services_tls_route_test.go.tmpl @@ -0,0 +1,110 @@ +package networkservices_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNetworkServicesTlsRoute_update(t *testing.T) { + t.Parallel() + + 
tlsServiceName := fmt.Sprintf("tf-test-tls-service-%s", acctest.RandString(t, 10)) + tlsHealthCheckName := fmt.Sprintf("tf-test-tls-healthcheck-%s", acctest.RandString(t, 10)) + tlsRouteName := fmt.Sprintf("tf-test-tls-route-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNetworkServicesTlsRouteDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNetworkServicesTlsRoute_basic(tlsServiceName, tlsHealthCheckName, tlsRouteName), + }, + { + ResourceName: "google_network_services_tls_route.foobar", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccNetworkServicesTlsRoute_update(tlsServiceName, tlsHealthCheckName, tlsRouteName), + }, + { + ResourceName: "google_network_services_tls_route.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccNetworkServicesTlsRoute_basic(tlsServiceName string, tlsHealthCheckName string, tlsRouteName string) string { + return fmt.Sprintf(` +resource "google_compute_backend_service" "foo" { + name = "%s" + health_checks = [google_compute_http_health_check.bar.id] +} + +resource "google_compute_http_health_check" "bar" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 +} + +resource "google_network_services_tls_route" "foobar" { + name = "%s" + description = "my description" + rules { + matches { + sni_host = ["example.com"] + alpn = ["http/1.1"] + } + action { + destinations { + service_name = google_compute_backend_service.foo.id + weight = 1 + } + } + } +} +`, tlsServiceName, tlsHealthCheckName, tlsRouteName) +} + +func testAccNetworkServicesTlsRoute_update(tlsServiceName string, tlsHealthCheckName string, tlsRouteName string) string { + return fmt.Sprintf(` + resource "google_compute_backend_service" "foo" { + name = "%s" + health_checks = 
[google_compute_http_health_check.bar.id] + } + + resource "google_compute_http_health_check" "bar" { + name = "%s" + request_path = "/" + check_interval_sec = 1 + timeout_sec = 1 + } + + resource "google_network_services_tls_route" "foobar" { + name = "%s" + description = "update description" + rules { + matches { + sni_host = ["example.com"] + alpn = ["http/1.1"] + } + action { + destinations { + service_name = google_compute_backend_service.foo.id + weight = 1 + } + } + } + } +`, tlsServiceName, tlsHealthCheckName, tlsRouteName) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_environment_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_environment_test.go.tmpl new file mode 100644 index 000000000000..984d4fd05686 --- /dev/null +++ b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_environment_test.go.tmpl @@ -0,0 +1,46 @@ +package notebooks_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNotebooksEnvironment_create(t *testing.T) { + t.Parallel() + + prefix := fmt.Sprintf("%d", acctest.RandInt(t)) + name := fmt.Sprintf("tf-env-%s", prefix) + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksEnvironment_create(name), + }, + { + ResourceName: "google_notebooks_environment.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccNotebooksEnvironment_create(name string) string { + return fmt.Sprintf(` + +resource "google_notebooks_environment" "test" { + name = "%s" + location = "us-west1-a" + container_image { + repository = "gcr.io/deeplearning-platform-release/base-cpu" + } +} +`, name) +} + +{{ end }} diff --git 
a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_container_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_container_test.go.tmpl new file mode 100644 index 000000000000..44f23aeb8e79 --- /dev/null +++ b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_container_test.go.tmpl @@ -0,0 +1,53 @@ +package notebooks_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNotebooksInstance_create_container(t *testing.T) { + t.Parallel() + + prefix := fmt.Sprintf("%d", acctest.RandInt(t)) + name := fmt.Sprintf("tf-%s", prefix) + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksInstance_create_container(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ExpectNonEmptyPlan: true, + ImportStateVerifyIgnore: []string{"container_image", "metadata", "vm_image"}, + }, + }, + }) +} + +func testAccNotebooksInstance_create_container(name string) string { + return fmt.Sprintf(` + +resource "google_notebooks_instance" "test" { + name = "%s" + location = "us-west1-a" + machine_type = "e2-medium" + metadata = { + proxy-mode = "service_account" + terraform = "true" + } + container_image { + repository = "gcr.io/deeplearning-platform-release/base-cpu" + tag = "latest" + } +} +`, name) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_gpu_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_gpu_test.go.tmpl new file mode 100644 index 000000000000..ba61e606fd29 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_gpu_test.go.tmpl @@ -0,0 +1,58 @@ +package notebooks_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNotebooksInstance_create_gpu(t *testing.T) { + t.Parallel() + + prefix := fmt.Sprintf("%d", acctest.RandInt(t)) + name := fmt.Sprintf("tf-%s", prefix) + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksInstance_create_gpu(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ExpectNonEmptyPlan: true, + ImportStateVerifyIgnore: []string{"container_image", "metadata", "vm_image"}, + }, + }, + }) +} + +func testAccNotebooksInstance_create_gpu(name string) string { + return fmt.Sprintf(` + +resource "google_notebooks_instance" "test" { + name = "%s" + location = "us-west1-a" + machine_type = "n1-standard-1" // can't be e2 because of accelerator + metadata = { + proxy-mode = "service_account" + terraform = "true" + } + vm_image { + project = "deeplearning-platform-release" + image_family = "tf-latest-gpu" + } + install_gpu_driver = true + accelerator_config { + type = "NVIDIA_TESLA_T4" + core_count = 1 + } +} +`, name) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_state_test.go.tmpl b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_state_test.go.tmpl new file mode 100644 index 000000000000..a935c50910e9 --- /dev/null +++ b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_state_test.go.tmpl @@ -0,0 +1,84 @@ +package notebooks_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "testing" + + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNotebooksInstance_state(t *testing.T) { + t.Parallel() + + prefix := fmt.Sprintf("%d", acctest.RandInt(t)) + name := fmt.Sprintf("tf-%s", prefix) + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksInstance_basic_active(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ExpectNonEmptyPlan: true, + ImportStateVerifyIgnore: []string{"container_image", "metadata", "vm_image","desired_state", "update_time"}, + }, + { + Config: testAccNotebooksInstance_basic_stopped(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ExpectNonEmptyPlan: true, + ImportStateVerifyIgnore: []string{"container_image", "metadata", "vm_image","desired_state", "update_time"}, + }, + { + Config: testAccNotebooksInstance_basic_active(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ExpectNonEmptyPlan: true, + ImportStateVerifyIgnore: []string{"container_image", "metadata", "vm_image","desired_state", "update_time"}, + }, + }, + }) +} + +func testAccNotebooksInstance_basic_active(name string) string { + return fmt.Sprintf(` +resource "google_notebooks_instance" "test" { + name = "%s" + location = "us-west1-a" + machine_type = "e2-medium" + vm_image { + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" + } + desired_state = "ACTIVE" +} +`, name) +} + +func testAccNotebooksInstance_basic_stopped(name string) string { + return fmt.Sprintf(` +resource "google_notebooks_instance" "test" { + name = "%s" + location = "us-west1-a" + machine_type = "e2-medium" + vm_image { + project = "deeplearning-platform-release" + image_family = 
"tf-latest-cpu" + } + desired_state = "STOPPED" +} +`, name) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_test.go b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_test.go new file mode 100644 index 000000000000..376cd5cb3465 --- /dev/null +++ b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_instance_test.go @@ -0,0 +1,152 @@ +package notebooks_test + +import ( + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccNotebooksInstance_create_vm_image(t *testing.T) { + t.Parallel() + + prefix := fmt.Sprintf("%d", acctest.RandInt(t)) + name := fmt.Sprintf("tf-%s", prefix) + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksInstance_create_vm_image(name), + }, + { + ResourceName: "google_notebooks_instance.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"vm_image", "metadata"}, + }, + }, + }) +} + +func TestAccNotebooksInstance_update(t *testing.T) { + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksInstance_basic(context), + }, + { + ResourceName: "google_notebooks_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"vm_image", "metadata", "update_time", "proxy_uri", "state"}, + }, + { + Config: testAccNotebooksInstance_update(context, true), + }, + { + ResourceName: "google_notebooks_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"vm_image", "metadata", "labels", 
"terraform_labels", "update_time", "proxy_uri", "state"}, + }, + { + Config: testAccNotebooksInstance_update(context, false), + }, + { + ResourceName: "google_notebooks_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"vm_image", "metadata", "labels", "terraform_labels", "update_time", "proxy_uri", "state"}, + }, + }, + }) +} + +func testAccNotebooksInstance_create_vm_image(name string) string { + return fmt.Sprintf(` + +resource "google_notebooks_instance" "test" { + name = "%s" + location = "us-west1-a" + machine_type = "e2-medium" + metadata = { + proxy-mode = "service_account" + terraform = "true" + } + + nic_type = "VIRTIO_NET" + + reservation_affinity { + consume_reservation_type = "NO_RESERVATION" + } + + vm_image { + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" + } +} +`, name) +} + +func testAccNotebooksInstance_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_notebooks_instance" "instance" { + name = "tf-test-notebooks-instance%{random_suffix}" + location = "us-central1-a" + machine_type = "e2-medium" + + vm_image { + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" + } + + metadata = { + proxy-mode = "service_account" + terraform = "true" + } + + lifecycle { + prevent_destroy = true + } +} +`, context) +} + +func testAccNotebooksInstance_update(context map[string]interface{}, preventDestroy bool) string { + context["prevent_destroy"] = strconv.FormatBool(preventDestroy) + + return acctest.Nprintf(` +resource "google_notebooks_instance" "instance" { + name = "tf-test-notebooks-instance%{random_suffix}" + location = "us-central1-a" + machine_type = "e2-medium" + + vm_image { + project = "deeplearning-platform-release" + image_family = "tf-latest-cpu" + } + + metadata = { + proxy-mode = "service_account" + terraform = "true" + notebook-upgrade-schedule = "0 * * * *" + } + + labels = { + key = "value" + } + 
+ lifecycle { + prevent_destroy = %{prevent_destroy} + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_runtime_test.go b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_runtime_test.go new file mode 100644 index 000000000000..b87ce5088e13 --- /dev/null +++ b/mmv1/third_party/terraform/services/notebooks/go/resource_notebooks_runtime_test.go @@ -0,0 +1,111 @@ +package notebooks_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccNotebooksRuntime_update(t *testing.T) { + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckNotebooksRuntimeDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccNotebooksRuntime_basic(context), + }, + { + ResourceName: "google_notebooks_runtime.runtime", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNotebooksRuntime_update(context), + }, + { + ResourceName: "google_notebooks_runtime.runtime", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + { + Config: testAccNotebooksRuntime_basic(context), + }, + { + ResourceName: "google_notebooks_runtime.runtime", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + + +func testAccNotebooksRuntime_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_notebooks_runtime" "runtime" { + name = "tf-test-notebooks-runtime%{random_suffix}" + location = "us-central1" + access_config { + 
access_type = "SINGLE_USER" + runtime_owner = "admin@hashicorptest.com" + } + software_config {} + virtual_machine { + virtual_machine_config { + machine_type = "n1-standard-4" + data_disk { + initialize_params { + disk_size_gb = "100" + disk_type = "PD_STANDARD" + } + } + reserved_ip_range = "192.168.255.0/24" + } + } +} +`, context) +} + +func testAccNotebooksRuntime_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_notebooks_runtime" "runtime" { + name = "tf-test-notebooks-runtime%{random_suffix}" + location = "us-central1" + access_config { + access_type = "SINGLE_USER" + runtime_owner = "admin@hashicorptest.com" + } + software_config { + idle_shutdown_timeout = "80" + } + virtual_machine { + virtual_machine_config { + machine_type = "n1-standard-8" + data_disk { + initialize_params { + disk_size_gb = "100" + disk_type = "PD_STANDARD" + } + } + accelerator_config { + core_count = "1" + type = "NVIDIA_TESLA_V100" + } + reserved_ip_range = "192.168.255.0/24" + } + } + labels = { + k = "val" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/orgpolicy/go/resource_org_policy_custom_constraint_test.go b/mmv1/third_party/terraform/services/orgpolicy/go/resource_org_policy_custom_constraint_test.go new file mode 100644 index 000000000000..e355de3a0714 --- /dev/null +++ b/mmv1/third_party/terraform/services/orgpolicy/go/resource_org_policy_custom_constraint_test.go @@ -0,0 +1,77 @@ +package orgpolicy_test + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccOrgPolicyCustomConstraint_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckOrgPolicyCustomConstraintDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccOrgPolicyCustomConstraint_v1(context), + }, + { + ResourceName: "google_org_policy_custom_constraint.constraint", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + { + Config: testAccOrgPolicyCustomConstraint_v2(context), + }, + { + ResourceName: "google_org_policy_custom_constraint.constraint", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"parent"}, + }, + }, + }) +} + +func testAccOrgPolicyCustomConstraint_v1(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_org_policy_custom_constraint" "constraint" { + name = "custom.tfTest%{random_suffix}" + parent = "organizations/%{org_id}" + display_name = "Disable GKE auto upgrade" + description = "Only allow GKE NodePool resource to be created or updated if AutoUpgrade is not enabled where this custom constraint is enforced." 
+ + action_type = "ALLOW" + condition = "resource.management.autoUpgrade == false" + method_types = ["CREATE", "UPDATE"] + resource_types = ["container.googleapis.com/NodePool"] +} +`, context) +} + +func testAccOrgPolicyCustomConstraint_v2(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_org_policy_custom_constraint" "constraint" { + name = "custom.tfTest%{random_suffix}" + parent = "organizations/%{org_id}" + display_name = "Updated" + description = "Updated" + + action_type = "DENY" + condition = "resource.management.autoUpgrade == true" + method_types = ["CREATE"] + resource_types = ["container.googleapis.com/NodePool"] +} +`, context) +} + diff --git a/mmv1/third_party/terraform/services/osconfig/go/resource_os_config_os_policy_assignment_test.go.tmpl b/mmv1/third_party/terraform/services/osconfig/go/resource_os_config_os_policy_assignment_test.go.tmpl new file mode 100644 index 000000000000..3fdf2ab6dfd0 --- /dev/null +++ b/mmv1/third_party/terraform/services/osconfig/go/resource_os_config_os_policy_assignment_test.go.tmpl @@ -0,0 +1,256 @@ + +package osconfig_test + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccOSConfigOSPolicyAssignment_basic(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckOSConfigOSPolicyAssignmentDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccOSConfigOSPolicyAssignment_basic(context), + }, + { + ResourceName: "google_os_config_os_policy_assignment.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"rollout.0.min_wait_duration"}, + }, + { + Config: testAccOSConfigOSPolicyAssignment_update(context), + }, + { + ResourceName: "google_os_config_os_policy_assignment.primary", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"rollout.0.min_wait_duration"}, + }, + }, + }) +} + +func testAccOSConfigOSPolicyAssignment_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_os_config_os_policy_assignment" "primary" { + instance_filter { + all = false + exclusion_labels { + labels = { + label-two = "value-two" + } + } + inclusion_labels { + labels = { + label-one = "value-one" + } + } + inventories { + os_short_name = "centos" + os_version = "8.*" + } + } + + location = "us-central1-a" + name = "tf-test-policy-assignment%{random_suffix}" + + os_policies { + id = "policy" + mode = "VALIDATION" + + resource_groups { + resources { + id = "apt-to-yum" + + repository { + apt { + archive_type = "DEB" + components = ["doc"] + distribution = "debian" + uri = "https://atl.mirrors.clouvider.net/debian" + gpg_key = ".gnupg/pubring.kbx" + } + } + } + inventory_filters { + os_short_name = "centos" + os_version = "8.*" + } + + resources { + id = "exec1" + exec { + validate { + interpreter = "SHELL" + args = ["arg1"] + file { + local_path = "$HOME/script.sh" + } + output_file_path = "$HOME/out" + } + enforce { + interpreter = "SHELL" + args = ["arg1"] + file { + allow_insecure = true + remote { + uri = "https://www.example.com/script.sh" + sha256_checksum = "c7938fed83afdccbb0e86a2a2e4cad7d5035012ca3214b4a61268393635c3063" + } + } + output_file_path = "$HOME/out" + } + } + } + } + allow_no_resource_group_match = false + description = "A test os policy" + } + + rollout { + disruption_budget { + percent = 100 + } 
+ + min_wait_duration = "3.2s" + } + + description = "A test os policy assignment" +} +`, context) +} + +func testAccOSConfigOSPolicyAssignment_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_os_config_os_policy_assignment" "primary" { + instance_filter { + all = false + inventories { + os_short_name = "centos" + os_version = "9.*" + } + } + + location = "us-central1-a" + name = "tf-test-policy-assignment%{random_suffix}" + + os_policies { + id = "policy" + mode = "ENFORCEMENT" + + resource_groups { + resources { + id = "apt-to-yum" + + repository { + yum { + id = "new-yum" + display_name = "new-yum" + base_url = "http://mirrors.rcs.alaska.edu/centos/" + gpg_keys = ["RPM-GPG-KEY-CentOS-Debug-7"] + } + } + } + inventory_filters { + os_short_name = "centos" + os_version = "8.*" + } + + resources { + id = "new-exec1" + exec { + validate { + interpreter = "POWERSHELL" + args = ["arg2"] + file { + local_path = "$HOME/script.bat" + } + output_file_path = "$HOME/out" + } + enforce { + interpreter = "POWERSHELL" + args = ["arg2"] + file { + allow_insecure = false + remote { + uri = "https://www.example.com/script.bat" + sha256_checksum = "9f8e5818ccb47024d01000db713c0a333679b64678ff5fe2d9bea0a23014dd54" + } + } + output_file_path = "$HOME/out" + } + } + } + } + allow_no_resource_group_match = true + description = "An updated test os policy" + } + + rollout { + disruption_budget { + percent = 90 + } + + min_wait_duration = "3.1s" + } + + description = "An updated test os policy assignment" +} +`, context) +} + +func testAccCheckOSConfigOSPolicyAssignmentDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_os_config_os_policy_assignment" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := 
tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}OSConfigBasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}location{{"}}"}}/osPolicyAssignments/{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("OSConfigOSPolicyAssignment still exists at %s", url) + } + } + + return nil + } +} diff --git a/mmv1/third_party/terraform/services/parallelstore/go/resource_parallelstore_instance_test.go.tmpl b/mmv1/third_party/terraform/services/parallelstore/go/resource_parallelstore_instance_test.go.tmpl new file mode 100644 index 000000000000..0a632c109ff0 --- /dev/null +++ b/mmv1/third_party/terraform/services/parallelstore/go/resource_parallelstore_instance_test.go.tmpl @@ -0,0 +1,137 @@ +package parallelstore_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccParallelstoreInstance_parallelstoreInstanceBasicExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckParallelstoreInstanceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccParallelstoreInstance_parallelstoreInstanceBasicExample_basic(context), + }, + { + ResourceName: "google_parallelstore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "instance_id", 
"labels", "terraform_labels"}, + }, + { + Config: testAccParallelstoreInstance_parallelstoreInstanceBasicExample_update(context), + }, + { + ResourceName: "google_parallelstore_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "instance_id", "labels", "terraform_labels"}, + }, + + }, + }) +} + +func testAccParallelstoreInstance_parallelstoreInstanceBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_parallelstore_instance" "instance" { + instance_id = "instance%{random_suffix}" + location = "us-central1-a" + description = "test instance" + capacity_gib = 12000 + network = google_compute_network.network.name + reserved_ip_range = google_compute_global_address.private_ip_alloc.name + labels = { + test = "value" + } + provider = google-beta + depends_on = [google_service_networking_connection.default] +} + +resource "google_compute_network" "network" { + name = "network%{random_suffix}" + auto_create_subnetworks = true + mtu = 8896 + provider = google-beta +} + + + +# Create an IP address +resource "google_compute_global_address" "private_ip_alloc" { + name = "address%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.network.id + provider = google-beta +} + +# Create a private connection +resource "google_service_networking_connection" "default" { + network = google_compute_network.network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] + provider = google-beta +} +`, context) +} + +func testAccParallelstoreInstance_parallelstoreInstanceBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_parallelstore_instance" "instance" { + instance_id = "instance%{random_suffix}" + location = "us-central1-a" + description = "test instance updated" + 
capacity_gib = 12000 + network = google_compute_network.network.name + + labels = { + test = "value23" + } + provider = google-beta + depends_on = [google_service_networking_connection.default] +} + +resource "google_compute_network" "network" { + name = "network%{random_suffix}" + auto_create_subnetworks = true + mtu = 8896 + provider = google-beta +} + + + +# Create an IP address +resource "google_compute_global_address" "private_ip_alloc" { + name = "address%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.network.id + provider = google-beta +} + +# Create a private connection +resource "google_service_networking_connection" "default" { + network = google_compute_network.network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_ip_alloc.name] + provider = google-beta +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/privilegedaccessmanager/go/resource_privileged_access_manager_entitlement_test.go.tmpl b/mmv1/third_party/terraform/services/privilegedaccessmanager/go/resource_privileged_access_manager_entitlement_test.go.tmpl new file mode 100644 index 000000000000..f09292a7a948 --- /dev/null +++ b/mmv1/third_party/terraform/services/privilegedaccessmanager/go/resource_privileged_access_manager_entitlement_test.go.tmpl @@ -0,0 +1,136 @@ +package privilegedaccessmanager_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementProjectExample_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "project_name": 
envvar.GetTestProjectFromEnv(), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckPrivilegedAccessManagerEntitlementDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementBasicExample_basic(context), + }, + { + ResourceName: "google_privileged_access_manager_entitlement.tfentitlement", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "entitlement_id", "parent"}, + }, + { + Config: testAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementBasicExample_update(context), + }, + { + ResourceName: "google_privileged_access_manager_entitlement.tfentitlement", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location", "entitlement_id", "parent"}, + }, + }, + }) +} + +func testAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementBasicExample_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privileged_access_manager_entitlement" "tfentitlement" { + provider = google-beta + entitlement_id = "tf-test-example-entitlement%{random_suffix}" + location = "global" + max_request_duration = "43200s" + parent = "projects/%{project_name}" + requester_justification_config { + unstructured{} + } + eligible_users { + principals = ["group:test@google.com"] + } + privileged_access{ + gcp_iam_access{ + role_bindings{ + role = "roles/storage.admin" + condition_expression = "request.time < timestamp(\"2024-04-23T18:30:00.000Z\")" + } + resource = "//cloudresourcemanager.googleapis.com/projects/%{project_name}" + resource_type = "cloudresourcemanager.googleapis.com/Project" + } + } + additional_notification_targets { + admin_email_recipients = ["user@example.com"] + requester_email_recipients = 
["user@example.com"] + } + approval_workflow { + manual_approvals { + require_approver_justification = true + steps { + approvals_needed = 1 + approver_email_recipients = ["user@example.com"] + approvers { + principals = ["group:test@google.com"] + } + } + } + } +} +`, context) +} + +func testAccPrivilegedAccessManagerEntitlement_privilegedAccessManagerEntitlementBasicExample_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_privileged_access_manager_entitlement" "tfentitlement" { + provider = google-beta + entitlement_id = "tf-test-example-entitlement%{random_suffix}" + location = "global" + max_request_duration = "4300s" + parent = "projects/%{project_name}" + requester_justification_config { + not_mandatory{} + } + eligible_users { + principals = ["group:test@google.com"] + } + privileged_access{ + gcp_iam_access{ + role_bindings{ + role = "roles/storage.admin" + condition_expression = "request.time < timestamp(\"2024-04-23T18:30:00.000Z\")" + } + resource = "//cloudresourcemanager.googleapis.com/projects/%{project_name}" + resource_type = "cloudresourcemanager.googleapis.com/Project" + } + } + additional_notification_targets { + admin_email_recipients = ["user1@example.com"] + requester_email_recipients = ["user2@example.com"] + } + approval_workflow { + manual_approvals { + require_approver_justification = false + steps { + approvals_needed = 1 + approver_email_recipients = ["user3@example.com"] + approvers { + principals = ["group:test@google.com"] + } + } + } + } +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/redis/go/resource_redis_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/redis/go/resource_redis_cluster_test.go.tmpl new file mode 100644 index 000000000000..968e13c777e6 --- /dev/null +++ b/mmv1/third_party/terraform/services/redis/go/resource_redis_cluster_test.go.tmpl @@ -0,0 +1,299 @@ +package redis_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" 
+ "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccRedisCluster_createClusterWithNodeType(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + // create cluster with replica count 1 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 3, preventDestroy: true, nodeType: "REDIS_STANDARD_SMALL", zoneDistributionMode: "MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // clean up the resource + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, nodeType: "REDIS_STANDARD_SMALL", zoneDistributionMode: "MULTI_ZONE"}), + }, + }, + }) +} + + +// Validate zone distribution for the cluster. 
+func TestAccRedisCluster_createClusterWithZoneDistribution(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + // create cluster with replica count 1 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, zoneDistributionMode: "SINGLE_ZONE", zone: "us-central1-b"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // clean up the resource + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, zoneDistributionMode: "SINGLE_ZONE", zone: "us-central1-b"}), + }, + }, + }) +} + +// Validate that replica count is updated for the cluster +func TestAccRedisCluster_updateReplicaCount(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + // create cluster with replica count 1 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 3, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // update replica count to 2 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 2, shardCount: 3, preventDestroy: true, zoneDistributionMode: 
"MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // clean up the resource + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE"}), + }, + { + // update replica count to 0 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // clean up the resource + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 0, shardCount: 3, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE"}), + }, + }, + }) +} + +// Validate that shard count is updated for the cluster +func TestAccRedisCluster_updateShardCount(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + // create cluster with shard count 3 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 3, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // update shard count to 5 + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 5, preventDestroy: true, zoneDistributionMode: "MULTI_ZONE"}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // clean up the resource + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, replicaCount: 1, shardCount: 5, preventDestroy: false, zoneDistributionMode: "MULTI_ZONE"}), + }, + }, + }) +} + +// Validate that redisConfigs is updated for the cluster +func TestAccRedisCluster_updateRedisConfigs(t *testing.T) { + t.Parallel() + + name := fmt.Sprintf("tf-test-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckRedisClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + // create cluster + Config: createOrUpdateRedisCluster(&ClusterParams{ + name: name, + shardCount: 3, + zoneDistributionMode: "MULTI_ZONE", + redisConfigs: map[string]string{ + "maxmemory-policy": "volatile-ttl", + }}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // add a new redis config key-value pair and update existing redis config + Config: createOrUpdateRedisCluster(&ClusterParams{ + name: name, + shardCount: 3, + zoneDistributionMode: "MULTI_ZONE", + redisConfigs: map[string]string{ + "maxmemory-policy": "allkeys-lru", + "maxmemory-clients": "90%", + }}), + }, + { + ResourceName: "google_redis_cluster.test", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"psc_configs"}, + }, + { + // remove all redis configs + Config: createOrUpdateRedisCluster(&ClusterParams{name: name, shardCount: 3, zoneDistributionMode: "MULTI_ZONE"}), + }, + + }, + }) +} + +type ClusterParams struct { + name string + replicaCount int + shardCount int + preventDestroy bool + nodeType string + redisConfigs map[string]string + zoneDistributionMode string + zone string +} + +func 
createOrUpdateRedisCluster(params *ClusterParams) string { + lifecycleBlock := "" + if params.preventDestroy { + lifecycleBlock = ` + lifecycle { + prevent_destroy = true + }` + } + var strBuilder strings.Builder + for key, value := range params.redisConfigs { + strBuilder.WriteString(fmt.Sprintf("%s = \"%s\"\n", key, value)) + } + + zoneDistributionConfigBlock := `` + if params.zoneDistributionMode != "" { + zoneDistributionConfigBlock = fmt.Sprintf(` + zone_distribution_config { + mode = "%s" + zone = "%s" + } + `, params.zoneDistributionMode, params.zone) + } + + return fmt.Sprintf(` +resource "google_redis_cluster" "test" { + provider = google-beta + name = "%s" + replica_count = %d + shard_count = %d + node_type = "%s" + region = "us-central1" + psc_configs { + network = google_compute_network.producer_net.id + } + redis_configs = { + %s + } + %s + depends_on = [ + google_network_connectivity_service_connection_policy.default + ] + %s +} + +resource "google_network_connectivity_service_connection_policy" "default" { + provider = google-beta + name = "%s" + location = "us-central1" + service_class = "gcp-memorystore-redis" + description = "my basic service connection policy" + network = google_compute_network.producer_net.id + psc_config { + subnetworks = [google_compute_subnetwork.producer_subnet.id] + } +} + +resource "google_compute_subnetwork" "producer_subnet" { + provider = google-beta + name = "%s" + ip_cidr_range = "10.0.0.248/29" + region = "us-central1" + network = google_compute_network.producer_net.id +} + +resource "google_compute_network" "producer_net" { + provider = google-beta + name = "%s" + auto_create_subnetworks = false +} +`, params.name, params.replicaCount, params.shardCount, params.nodeType, strBuilder.String(), zoneDistributionConfigBlock, lifecycleBlock, params.name, params.name, params.name) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config.go.tmpl 
b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config.go.tmpl new file mode 100644 index 000000000000..ed5b54a9663d --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config.go.tmpl @@ -0,0 +1,43 @@ +package runtimeconfig + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleRuntimeconfigConfig() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceRuntimeconfigConfig().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleRuntimeconfigConfigRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleRuntimeconfigConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/configs/{{"{{"}}name{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceRuntimeconfigConfigRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config_test.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config_test.go.tmpl new file mode 100644 index 000000000000..6f6d76568e12 --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_config_test.go.tmpl @@ -0,0 +1,43 @@ +package runtimeconfig_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + 
"testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccRuntimeconfigConfigDatasource_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigDatasourceConfig(acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_runtimeconfig_config.default", "google_runtimeconfig_config.default"), + ), + }, + }, + }) +} + +func testAccRuntimeconfigDatasourceConfig(suffix string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "default" { + name = "runtime-%s" + description = "runtime-%s" +} + +data "google_runtimeconfig_config" "default" { + name = google_runtimeconfig_config.default.name +} +`, suffix, suffix) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable.go.tmpl new file mode 100644 index 000000000000..e4be153f05c6 --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable.go.tmpl @@ -0,0 +1,46 @@ +package runtimeconfig + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func DataSourceGoogleRuntimeconfigVariable() *schema.Resource { + + dsSchema := tpgresource.DatasourceSchemaFromResourceSchema(ResourceRuntimeconfigVariable().Schema) + tpgresource.AddRequiredFieldsToSchema(dsSchema, "name") + 
tpgresource.AddRequiredFieldsToSchema(dsSchema, "parent") + tpgresource.AddOptionalFieldsToSchema(dsSchema, "project") + + return &schema.Resource{ + Read: dataSourceGoogleRuntimeconfigVariableRead, + Schema: dsSchema, + } +} + +func dataSourceGoogleRuntimeconfigVariableRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/configs/{{"{{"}}parent{{"}}"}}/variables/{{"{{"}}name{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + err = resourceRuntimeconfigVariableRead(d, meta) + if err != nil { + return err + } + + if d.Id() == "" { + return fmt.Errorf("%s not found", id) + } + return nil +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable_test.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable_test.go.tmpl new file mode 100644 index 000000000000..a61f996d87a0 --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/data_source_runtimeconfig_variable_test.go.tmpl @@ -0,0 +1,50 @@ +package runtimeconfig_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccRuntimeconfigVariableDatasource_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigDatasourceVariable(acctest.RandString(t, 10), acctest.RandString(t, 10), acctest.RandString(t, 10)), + Check: resource.ComposeTestCheckFunc( + acctest.CheckDataSourceStateMatchesResourceState("data.google_runtimeconfig_variable.default", 
"google_runtimeconfig_variable.default"), + ), + }, + }, + }) +} + +func testAccRuntimeconfigDatasourceVariable(suffix string, name string, text string) string { + return fmt.Sprintf(` + resource "google_runtimeconfig_config" "default" { + name = "runtime-%s" + description = "runtime-%s" + } + + resource "google_runtimeconfig_variable" "default" { + parent = google_runtimeconfig_config.default.name + name = "%s" + text = "%s" + } + + data "google_runtimeconfig_variable" "default" { + name = google_runtimeconfig_variable.default.name + parent = google_runtimeconfig_config.default.name + } +`, suffix, suffix, name, text) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config.go.tmpl new file mode 100644 index 000000000000..956ee320b37a --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config.go.tmpl @@ -0,0 +1,207 @@ +package runtimeconfig + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "regexp" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1" +) + +var runtimeConfigFullName *regexp.Regexp = regexp.MustCompile("^projects/([^/]+)/configs/(.+)$") + +func ResourceRuntimeconfigConfig() *schema.Resource { + return &schema.Resource{ + Create: resourceRuntimeconfigConfigCreate, + Read: resourceRuntimeconfigConfigRead, + Update: resourceRuntimeconfigConfigUpdate, + Delete: resourceRuntimeconfigConfigDelete, + + Importer: &schema.ResourceImporter{ + State: resourceRuntimeconfigConfigImport, + }, + + CustomizeDiff: 
customdiff.All( + tpgresource.DefaultProviderProject, + ), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateFunc: verify.ValidateRegexp("[0-9A-Za-z](?:[_.A-Za-z0-9-]{0,62}[_.A-Za-z0-9])?"), + Description: `The name of the runtime config.`, + }, + + "description": { + Type: schema.TypeString, + Optional: true, + Description: `The description to associate with the runtime config.`, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The ID of the project in which the resource belongs. If it is not provided, the provider project is used.`, + }, + }, + UseJSONNumber: true, + } +} + +func resourceRuntimeconfigConfigCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + name := d.Get("name").(string) + fullName := resourceRuntimeconfigFullName(project, name) + runtimeConfig := runtimeconfig.RuntimeConfig{ + Name: fullName, + } + + if val, ok := d.GetOk("description"); ok { + runtimeConfig.Description = val.(string) + } + + _, err = config.NewRuntimeconfigClient(userAgent).Projects.Configs.Create("projects/"+project, &runtimeConfig).Do() + + if err != nil { + return err + } + d.SetId(fullName) + + return nil +} + +func resourceRuntimeconfigConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + fullName := d.Id() + runConfig, err := config.NewRuntimeconfigClient(userAgent).Projects.Configs.Get(fullName).Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("RuntimeConfig %q", d.Id())) + } + + 
project, name, err := resourceRuntimeconfigParseFullName(runConfig.Name) + if err != nil { + return err + } + // Check to see if project matches our current defined value - if it doesn't, we'll explicitly set it + curProject, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + if project != curProject { + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + } + + if err := d.Set("name", name); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + if err := d.Set("description", runConfig.Description); err != nil { + return fmt.Errorf("Error setting description: %s", err) + } + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + + return nil +} + +func resourceRuntimeconfigConfigUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + // Update works more like an 'overwrite' method - we build a new runtimeconfig.RuntimeConfig struct and it becomes + // the new config. This means our Update logic looks an awful lot like Create (and hence, doesn't use + // schema.ResourceData.hasChange()). 
+ fullName := d.Id() + runtimeConfig := runtimeconfig.RuntimeConfig{ + Name: fullName, + } + if v, ok := d.GetOk("description"); ok { + runtimeConfig.Description = v.(string) + } + + _, err = config.NewRuntimeconfigClient(userAgent).Projects.Configs.Update(fullName, &runtimeConfig).Do() + if err != nil { + return err + } + return nil +} + +func resourceRuntimeconfigConfigDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + fullName := d.Id() + + _, err = config.NewRuntimeconfigClient(userAgent).Projects.Configs.Delete(fullName).Do() + if err != nil { + return err + } + d.SetId("") + return nil +} + +func resourceRuntimeconfigConfigImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{"projects/(?P[^/]+)/configs/(?P[^/]+)", "(?P[^/]+)"}, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err := tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/configs/{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +// resourceRuntimeconfigFullName turns a given project and a 'short name' for a runtime config into a full name +// (e.g. projects/my-project/configs/my-config). +func resourceRuntimeconfigFullName(project, name string) string { + return fmt.Sprintf("projects/%s/configs/%s", project, name) +} + +// resourceRuntimeconfigParseFullName parses a full name (e.g. projects/my-project/configs/my-config) by parsing out the +// project and the short name. Returns "", "", nil upon error. 
+func resourceRuntimeconfigParseFullName(fullName string) (project, name string, err error) { + matches := runtimeConfigFullName.FindStringSubmatch(fullName) + if matches == nil { + return "", "", fmt.Errorf("Given full name doesn't match expected regexp; fullname = '%s'", fullName) + } + return matches[1], matches[2], nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config_test.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config_test.go.tmpl new file mode 100644 index 000000000000..24d55ab4bbda --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_config_test.go.tmpl @@ -0,0 +1,179 @@ +package runtimeconfig_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1" +) + +func TestAccRuntimeconfigConfig_basic(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(t, 10)) + description := "my test description" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, description), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + t, "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, description), + ), + }, + { + ResourceName: "google_runtimeconfig_config.foobar", + ImportState: true, + ImportStateVerify: true, + }, 
+ }, + }) +} + +func TestAccRuntimeconfig_update(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(t, 10)) + firstDescription := "my test description" + secondDescription := "my updated test description" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, firstDescription), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + t, "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, firstDescription), + ), + }, { + Config: testAccRuntimeconfigConfig_basicDescription(configName, secondDescription), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + t, "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, secondDescription), + ), + }, + }, + }) +} + +func TestAccRuntimeconfig_updateEmptyDescription(t *testing.T) { + t.Parallel() + + var runtimeConfig runtimeconfig.RuntimeConfig + configName := fmt.Sprintf("runtimeconfig-test-%s", acctest.RandString(t, 10)) + description := "my test description" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigConfig_basicDescription(configName, description), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + t, "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, description), + ), + }, { + 
Config: testAccRuntimeconfigConfig_emptyDescription(configName), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeConfigExists( + t, "google_runtimeconfig_config.foobar", &runtimeConfig), + testAccCheckRuntimeConfigDescription(&runtimeConfig, ""), + ), + }, + }, + }) +} + +func testAccCheckRuntimeConfigDescription(runtimeConfig *runtimeconfig.RuntimeConfig, description string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if runtimeConfig.Description != description { + return fmt.Errorf("On runtime config '%s', expected description '%s', but found '%s'", + runtimeConfig.Name, description, runtimeConfig.Description) + } + return nil + } +} + +func testAccCheckRuntimeConfigExists(t *testing.T, resourceName string, runtimeConfig *runtimeconfig.RuntimeConfig) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := acctest.GoogleProviderConfig(t) + + found, err := config.NewRuntimeconfigClient(config.UserAgent).Projects.Configs.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + *runtimeConfig = *found + + return nil + } +} + +func testAccCheckRuntimeconfigConfigDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_runtimeconfig_config" { + continue + } + + _, err := config.NewRuntimeconfigClient(config.UserAgent).Projects.Configs.Get(rs.Primary.ID).Do() + + if err == nil { + return fmt.Errorf("Runtimeconfig still exists") + } + } + + return nil + } +} + +func testAccRuntimeconfigConfig_basicDescription(name, description string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" + description = "%s" +} +`, 
name, description) +} + +func testAccRuntimeconfigConfig_emptyDescription(name string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" +} +`, name) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable.go.tmpl new file mode 100644 index 000000000000..78fb3b12229f --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable.go.tmpl @@ -0,0 +1,253 @@ +package runtimeconfig + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "regexp" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1" +) + +func ResourceRuntimeconfigVariable() *schema.Resource { + return &schema.Resource{ + Create: resourceRuntimeconfigVariableCreate, + Read: resourceRuntimeconfigVariableRead, + Update: resourceRuntimeconfigVariableUpdate, + Delete: resourceRuntimeconfigVariableDelete, + + Importer: &schema.ResourceImporter{ + State: resourceRuntimeconfigVariableImport, + }, + + CustomizeDiff: customdiff.All( + tpgresource.DefaultProviderProject, + ), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the variable to manage. Note that variable names can be hierarchical using slashes (e.g. 
"prod-variables/hostname").`, + }, + + "parent": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The name of the RuntimeConfig resource containing this variable.`, + }, + + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The ID of the project in which the resource belongs. If it is not provided, the provider project is used.`, + }, + + "value": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + ExactlyOneOf: []string{"text", "value"}, + }, + + "text": { + Type: schema.TypeString, + Optional: true, + Sensitive: true, + ExactlyOneOf: []string{"text", "value"}, + }, + + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds, representing when the variable was last updated. Example: "2016-10-09T12:33:37.578138407Z".`, + }, + }, + UseJSONNumber: true, + } +} + +func resourceRuntimeconfigVariableCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + variable, parent, err := newRuntimeconfigVariableFromResourceData(d, project) + if err != nil { + return err + } + + createdVariable, err := config.NewRuntimeconfigClient(userAgent).Projects.Configs.Variables.Create(resourceRuntimeconfigFullName(project, parent), variable).Do() + if err != nil { + return err + } + d.SetId(createdVariable.Name) + + return setRuntimeConfigVariableToResourceData(d, *createdVariable) +} + +func resourceRuntimeconfigVariableRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + fullName := d.Id() + 
createdVariable, err := config.NewRuntimeconfigClient(userAgent).Projects.Configs.Variables.Get(fullName).Do() + if err != nil { + return err + } + + return setRuntimeConfigVariableToResourceData(d, *createdVariable) +} + +func resourceRuntimeconfigVariableUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + // Update works more like an 'overwrite' method - we build a new runtimeconfig.Variable struct and it becomes the + // new config. This means our Update logic looks an awful lot like Create (and hence, doesn't use + // schema.ResourceData.hasChange()). + + variable, _, err := newRuntimeconfigVariableFromResourceData(d, project) + if err != nil { + return err + } + + createdVariable, err := config.NewRuntimeconfigClient(userAgent).Projects.Configs.Variables.Update(variable.Name, variable).Do() + if err != nil { + return err + } + + return setRuntimeConfigVariableToResourceData(d, *createdVariable) +} + +func resourceRuntimeconfigVariableDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + fullName := d.Id() + + _, err = config.NewRuntimeconfigClient(userAgent).Projects.Configs.Variables.Delete(fullName).Do() + if err != nil { + return err + } + d.SetId("") + + return nil +} + +func resourceRuntimeconfigVariableImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{"projects/(?P[^/]+)/configs/(?P[^/]+)/variables/(?P[^/]+)", "(?P[^/]+)/(?P[^/]+)"}, d, config); err != nil { + return nil, err + } + + // Replace import id for the resource id + id, err 
:= tpgresource.ReplaceVars(d, config, "projects/{{"{{"}}project{{"}}"}}/configs/{{"{{"}}parent{{"}}"}}/variables/{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +// resourceRuntimeconfigVariableFullName turns a given project, runtime config name, and a 'short name' for a runtime +// config variable into a full name (e.g. projects/my-project/configs/my-config/variables/my-variable). +func resourceRuntimeconfigVariableFullName(project, config, name string) string { + return fmt.Sprintf("projects/%s/configs/%s/variables/%s", project, config, name) +} + +// resourceRuntimeconfigVariableParseFullName parses a full name +// (e.g. projects/my-project/configs/my-config/variables/my-variable) by parsing out the +// project, runtime config name, and the short name. Returns "", "", "", err upon error. +func resourceRuntimeconfigVariableParseFullName(fullName string) (project, config, name string, err error) { + re := regexp.MustCompile("^projects/([^/]+)/configs/([^/]+)/variables/(.+)$") + matches := re.FindStringSubmatch(fullName) + if matches == nil { + return "", "", "", fmt.Errorf("Given full name doesn't match expected regexp; fullname = '%s'", fullName) + } + return matches[1], matches[2], matches[3], nil +} + +// newRuntimeconfigVariableFromResourceData builds a new runtimeconfig.Variable struct from the data stored in a +// schema.ResourceData. Also returns the full name of the parent. Returns nil, "", err upon error. +func newRuntimeconfigVariableFromResourceData(d *schema.ResourceData, project string) (variable *runtimeconfig.Variable, parent string, err error) { + + text := d.Get("text") + value := d.Get("value") + + // TODO(selmanj) here we assume it's a simple name, not a full name. 
Should probably support full name as well + parent = d.Get("parent").(string) + name := d.Get("name").(string) + + fullName := resourceRuntimeconfigVariableFullName(project, parent, name) + + variable = &runtimeconfig.Variable{ + Name: fullName, + } + + if text != "" { + variable.Text = text.(string) + } else { + variable.Value = value.(string) + } + + return variable, parent, nil +} + +// setRuntimeConfigVariableToResourceData stores a provided runtimeconfig.Variable struct inside a schema.ResourceData. +func setRuntimeConfigVariableToResourceData(d *schema.ResourceData, variable runtimeconfig.Variable) error { + varProject, parent, name, err := resourceRuntimeconfigVariableParseFullName(variable.Name) + if err != nil { + return err + } + if err := d.Set("name", name); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + if err := d.Set("parent", parent); err != nil { + return fmt.Errorf("Error setting parent: %s", err) + } + if err := d.Set("project", varProject); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("value", variable.Value); err != nil { + return fmt.Errorf("Error setting value: %s", err) + } + if err := d.Set("text", variable.Text); err != nil { + return fmt.Errorf("Error setting text: %s", err) + } + if err := d.Set("update_time", variable.UpdateTime); err != nil { + return fmt.Errorf("Error setting update_time: %s", err) + } + + return nil +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable_test.go.tmpl b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable_test.go.tmpl new file mode 100644 index 000000000000..191290c86cb6 --- /dev/null +++ b/mmv1/third_party/terraform/services/runtimeconfig/go/resource_runtimeconfig_variable_test.go.tmpl @@ -0,0 +1,243 @@ +package runtimeconfig_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + "testing" + "time" + + 
"github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + runtimeconfig "google.golang.org/api/runtimeconfig/v1beta1" +) + +func TestAccRuntimeconfigVariable_basic(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(t, 10)) + varText := "this is my test value" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigVariableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicText(acctest.RandString(t, 10), varName, varText), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + t, "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText), + testAccCheckRuntimeconfigVariableUpdateTime("google_runtimeconfig_variable.foobar"), + ), + }, + { + ResourceName: "google_runtimeconfig_variable.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_basicUpdate(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + configName := fmt.Sprintf("some-name-%s", acctest.RandString(t, 10)) + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(t, 10)) + varText := "this is my test value" + varText2 := "this is my updated value" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigVariableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicTextUpdate(configName, varName, varText), + Check: resource.ComposeTestCheckFunc( + 
testAccCheckRuntimeconfigVariableExists( + t, "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText), + ), + }, { + Config: testAccRuntimeconfigVariable_basicTextUpdate(configName, varName, varText2), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + t, "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableText(&variable, varText2), + ), + }, + }, + }) +} + +func TestAccRuntimeconfigVariable_basicValue(t *testing.T) { + t.Parallel() + + var variable runtimeconfig.Variable + + varName := fmt.Sprintf("variable-test-%s", acctest.RandString(t, 10)) + varValue := "Zm9vYmFyCg==" + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckRuntimeconfigVariableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccRuntimeconfigVariable_basicValue(acctest.RandString(t, 10), varName, varValue), + Check: resource.ComposeTestCheckFunc( + testAccCheckRuntimeconfigVariableExists( + t, "google_runtimeconfig_variable.foobar", &variable), + testAccCheckRuntimeconfigVariableValue(&variable, varValue), + testAccCheckRuntimeconfigVariableUpdateTime("google_runtimeconfig_variable.foobar"), + ), + }, + { + ResourceName: "google_runtimeconfig_variable.foobar", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccCheckRuntimeconfigVariableExists(t *testing.T, resourceName string, variable *runtimeconfig.Variable) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No ID is set") + } + + config := acctest.GoogleProviderConfig(t) + + found, err := 
config.NewRuntimeconfigClient(config.UserAgent).Projects.Configs.Variables.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + *variable = *found + + return nil + } +} + +func testAccCheckRuntimeconfigVariableUpdateTime(resourceName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[resourceName] + if !ok { + return fmt.Errorf("Not found: %s", resourceName) + } + + updateTime := rs.Primary.Attributes["update_time"] + if updateTime == "" { + return fmt.Errorf("No update time set for resource %s", resourceName) + } + + // Make sure it's a valid rfc 3339 date + _, err := time.Parse(time.RFC3339, updateTime) + if err != nil { + return fmt.Errorf("Error while parsing update time for resource %s: %s", resourceName, err.Error()) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableText(variable *runtimeconfig.Variable, text string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if variable.Text != text { + return fmt.Errorf("Variable %s had incorrect text: expected '%s' but found '%s'", variable.Name, + text, variable.Text) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableValue(variable *runtimeconfig.Variable, value string) resource.TestCheckFunc { + return func(s *terraform.State) error { + if variable.Value != value { + return fmt.Errorf("Variable %s had incorrect value: expected '%s' but found '%s'", variable.Name, + value, variable.Value) + } + + return nil + } +} + +func testAccCheckRuntimeconfigVariableDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_runtimeconfig_variable" { + continue + } + + _, err := config.NewRuntimeconfigClient(config.UserAgent).Projects.Configs.Variables.Get(rs.Primary.ID).Do() + + if err == nil { + return fmt.Errorf("Runtimeconfig variable still 
exists") + } + } + + return nil + } +} + +func testAccRuntimeconfigVariable_basicText(suffix, name, text string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = google_runtimeconfig_config.foobar.name + name = "%s" + text = "%s" +} +`, suffix, name, text) +} + +func testAccRuntimeconfigVariable_basicTextUpdate(configName, name, text string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = google_runtimeconfig_config.foobar.name + name = "%s" + text = "%s" +} +`, configName, name, text) +} + +func testAccRuntimeconfigVariable_basicValue(suffix, name, value string) string { + return fmt.Sprintf(` +resource "google_runtimeconfig_config" "foobar" { + name = "some-config-%s" +} + +resource "google_runtimeconfig_variable" "foobar" { + parent = google_runtimeconfig_config.foobar.name + name = "%s" + value = "%s" +} +`, suffix, name, value) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/secretmanager/go/iam_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/go/iam_secret_manager_secret_test.go new file mode 100644 index 000000000000..ee567f13d1d0 --- /dev/null +++ b/mmv1/third_party/terraform/services/secretmanager/go/iam_secret_manager_secret_test.go @@ -0,0 +1,118 @@ +package secretmanager_test + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccSecretManagerSecretIam_iamMemberConditionUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/secretmanager.secretAccessor", + } + + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecretIam_iamMemberCondition_basic(context), + }, + { + ResourceName: "google_secret_manager_secret_iam_member.default", + ImportStateId: fmt.Sprintf("projects/%s/secrets/%s %s serviceAccount:%s %s", envvar.GetTestProjectFromEnv(), fmt.Sprintf("tf-test-secret-%s", context["random_suffix"]), context["role"], fmt.Sprintf("tf-test-sa-%s@%s.iam.gserviceaccount.com", context["random_suffix"], envvar.GetTestProjectFromEnv()), fmt.Sprintf("tf-test-condition-%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccSecretManagerSecretIam_iamMemberCondition_update(context), + }, + { + ResourceName: "google_secret_manager_secret_iam_member.default", + ImportStateId: fmt.Sprintf("projects/%s/secrets/%s %s serviceAccount:%s %s", envvar.GetTestProjectFromEnv(), fmt.Sprintf("tf-test-secret-%s", context["random_suffix"]), context["role"], fmt.Sprintf("tf-test-sa-%s@%s.iam.gserviceaccount.com", context["random_suffix"], envvar.GetTestProjectFromEnv()), fmt.Sprintf("tf-test-condition-new-%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccSecretManagerSecretIam_iamMemberCondition_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-sa-%{random_suffix}" + display_name = "Secret manager IAM testing account" +} + +resource "google_secret_manager_secret" "default" { + secret_id = "tf-test-secret-%{random_suffix}" + ttl = "3600s" + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} + +resource "google_secret_manager_secret_iam_member" "default" { + 
secret_id = google_secret_manager_secret.default.id + role = "%{role}" + member = "serviceAccount:${google_service_account.default.email}" + condition { + title = "tf-test-condition-%{random_suffix}" + description = "test condition" + expression = "request.time < timestamp(\"2022-03-01T00:00:00Z\")" + } +} +`, context) +} + +func testAccSecretManagerSecretIam_iamMemberCondition_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_service_account" "default" { + account_id = "tf-test-sa-%{random_suffix}" + display_name = "Secret manager IAM testing account" +} + +resource "google_secret_manager_secret" "default" { + secret_id = "tf-test-secret-%{random_suffix}" + ttl = "3600s" + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} + +resource "google_secret_manager_secret_iam_member" "default" { + secret_id = google_secret_manager_secret.default.id + role = "%{role}" + member = "serviceAccount:${google_service_account.default.email}" + condition { + title = "tf-test-condition-new-%{random_suffix}" + description = "test new condition" + expression = "request.time < timestamp(\"2024-03-01T00:00:00Z\")" + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_test.go b/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_test.go new file mode 100644 index 000000000000..c0ce2fcda347 --- /dev/null +++ b/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_test.go @@ -0,0 +1,1222 @@ +package secretmanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccSecretManagerSecret_import(t *testing.T) { + t.Parallel() + + context := 
map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_basic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_cmek(t *testing.T) { + t.Parallel() + + kmscentral := acctest.BootstrapKMSKeyInLocation(t, "us-central1") + kmseast := acctest.BootstrapKMSKeyInLocation(t, "us-east1") + context1 := map[string]interface{}{ + "pid": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "kms_key_name_central": kmscentral.CryptoKey.Name, + "kms_key_name_east": kmseast.CryptoKey.Name, + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretMangerSecret_cmek(context1), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_annotationsUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccSecretManagerSecret_annotationsBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, + }, + { + Config: testAccSecretManagerSecret_annotationsUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, + }, + { + Config: testAccSecretManagerSecret_annotationsBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-with-annotations", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels", "annotations"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_versionAliasesUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_basicWithSecretVersions(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_versionAliasesBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_versionAliasesUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_basicWithSecretVersions(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_userManagedCmekUpdate(t *testing.T) { + t.Parallel() + + kmscentral := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key1") + kmseast := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-east1", "tf-secret-manager-managed-east-key1") + kmscentralother := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "us-central1", "tf-secret-manager-managed-central-key2") + context := map[string]interface{}{ + "pid": envvar.GetTestProjectFromEnv(), + "random_suffix": acctest.RandString(t, 10), + "kms_key_name_central": kmscentral.CryptoKey.Name, + "kms_key_name_east": kmseast.CryptoKey.Name, + "kms_key_name_central_other": kmscentralother.CryptoKey.Name, + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretMangerSecret_userManagedCmekBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretMangerSecret_userManagedCmekUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", 
"terraform_labels"}, + }, + { + Config: testAccSecretMangerSecret_userManagedCmekUpdate2(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretMangerSecret_userManagedCmekBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_automaticCmekUpdate(t *testing.T) { + t.Parallel() + + suffix := acctest.RandString(t, 10) + key1 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-secret-manager-automatic-key1") + key2 := acctest.BootstrapKMSKeyWithPurposeInLocationAndName(t, "ENCRYPT_DECRYPT", "global", "tf-secret-manager-automatic-key2") + context := map[string]interface{}{ + "pid": envvar.GetTestProjectFromEnv(), + "random_suffix": suffix, + "kms_key_name_1": key1.CryptoKey.Name, + "kms_key_name_2": key2.CryptoKey.Name, + } + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretMangerSecret_automaticCmekBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretMangerSecret_automaticCmekUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: 
testAccSecretMangerSecret_automaticCmekUpdate2(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretMangerSecret_automaticCmekBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_rotationPeriodUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "timestamp": "2122-11-26T19:58:16Z", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_withoutRotationPeriod(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl"}, + }, + { + Config: testAccSecretManagerSecret_rotationPeriodBasic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl"}, + }, + { + Config: testAccSecretManagerSecret_rotationPeriodUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl"}, + }, + { + Config: testAccSecretManagerSecret_withoutRotationPeriod(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl"}, + }, + }, + }) +} + +func 
TestAccSecretManagerSecret_ttlUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_withoutTtl(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_basic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_ttlUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_withoutTtl(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_versionDestroyTtlUpdate(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_withoutVersionDestroyTtl(context), + }, + { + ResourceName: 
"google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_versionDestroyTtlUpdate(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_withoutVersionDestroyTtl(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccSecretManagerSecret_updateBetweenTtlAndExpireTime(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecret_basic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_expireTime(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + { + Config: testAccSecretManagerSecret_basic(context), + }, + { + ResourceName: "google_secret_manager_secret.secret-basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ttl", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccSecretManagerSecret_basic(context 
map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + + ttl = "3600s" + +} +`, context) +} + +func testAccSecretMangerSecret_cmek(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_project_iam_member" "kms-secret-binding" { + project = data.google_project.project.project_id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_central}" + } + } + replicas { + location = "us-east1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_east}" + } + } + + } + } + project = google_project_iam_member.kms-secret-binding.project +} +`, context) +} + +func testAccSecretManagerSecret_annotationsBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-with-annotations" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + annotations = { + key1 = "someval" + key2 = "someval2" + key3 = "someval3" + key4 = "someval4" + key5 = "someval5" + } + + replication { + auto {} + } +} +`, context) +} + +func testAccSecretManagerSecret_annotationsUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-with-annotations" { + secret_id = 
"tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + annotations = { + key1 = "someval" + key2update = "someval2" + key3 = "someval3update" + key4update = "someval4update" + } + + replication { + auto {} + } +} +`, context) +} + +func testAccSecretManagerSecret_basicWithSecretVersions(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} + +resource "google_secret_manager_secret_version" "secret-version-1" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-1" +} + +resource "google_secret_manager_secret_version" "secret-version-2" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-2" +} + +resource "google_secret_manager_secret_version" "secret-version-3" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-3" +} + +resource "google_secret_manager_secret_version" "secret-version-4" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-4" +} +`, context) +} + +func testAccSecretManagerSecret_versionAliasesBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + version_aliases = { + firstalias = "1", + secondalias = "2", + thirdalias = "3", + otheralias = "2", + somealias = "3" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} + +resource "google_secret_manager_secret_version" 
"secret-version-1" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-1" +} + +resource "google_secret_manager_secret_version" "secret-version-2" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-2" +} + +resource "google_secret_manager_secret_version" "secret-version-3" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-3" +} + +resource "google_secret_manager_secret_version" "secret-version-4" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-4" +} +`, context) +} + +func testAccSecretManagerSecret_versionAliasesUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + version_aliases = { + firstalias = "1", + secondaliasupdated = "2", + otheralias = "1", + somealias = "3", + fourthalias = "4" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} + +resource "google_secret_manager_secret_version" "secret-version-1" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-1" +} + +resource "google_secret_manager_secret_version" "secret-version-2" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-2" +} + +resource "google_secret_manager_secret_version" "secret-version-3" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-3" +} + +resource "google_secret_manager_secret_version" "secret-version-4" { + secret = google_secret_manager_secret.secret-basic.id + + secret_data = "some-secret-data-%{random_suffix}-4" +} 
+`, context) +} + +func testAccSecretMangerSecret_userManagedCmekBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-1" { + crypto_key_id = "%{kms_key_name_central}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-2" { + crypto_key_id = "%{kms_key_name_central_other}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-east-binding" { + crypto_key_id = "%{kms_key_name_east}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + depends_on = [ + google_kms_crypto_key_iam_member.kms-central-binding-1, + google_kms_crypto_key_iam_member.kms-central-binding-2, + google_kms_crypto_key_iam_member.kms-east-binding, + ] +} +`, context) +} + +func testAccSecretMangerSecret_userManagedCmekUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-1" { + crypto_key_id = "%{kms_key_name_central}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = 
"serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-2" { + crypto_key_id = "%{kms_key_name_central_other}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-east-binding" { + crypto_key_id = "%{kms_key_name_east}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_central}" + } + } + replicas { + location = "us-east1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_east}" + } + } + } + } + depends_on = [ + google_kms_crypto_key_iam_member.kms-central-binding-1, + google_kms_crypto_key_iam_member.kms-central-binding-2, + google_kms_crypto_key_iam_member.kms-east-binding, + ] +} +`, context) +} + +func testAccSecretMangerSecret_userManagedCmekUpdate2(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-1" { + crypto_key_id = "%{kms_key_name_central}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-central-binding-2" { + crypto_key_id = "%{kms_key_name_central_other}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = 
"serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-east-binding" { + crypto_key_id = "%{kms_key_name_east}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + user_managed { + replicas { + location = "us-central1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_central_other}" + } + } + replicas { + location = "us-east1" + customer_managed_encryption { + kms_key_name = "%{kms_key_name_east}" + } + } + } + } + depends_on = [ + google_kms_crypto_key_iam_member.kms-central-binding-1, + google_kms_crypto_key_iam_member.kms-central-binding-2, + google_kms_crypto_key_iam_member.kms-east-binding, + ] +} +`, context) +} + +func testAccSecretMangerSecret_automaticCmekBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-1" { + crypto_key_id = "%{kms_key_name_1}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-2" { + crypto_key_id = "%{kms_key_name_2}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + auto {} + } + depends_on = [ + 
google_kms_crypto_key_iam_member.kms-secret-binding-1, + google_kms_crypto_key_iam_member.kms-secret-binding-2, + ] +} +`, context) +} + +func testAccSecretMangerSecret_automaticCmekUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-1" { + crypto_key_id = "%{kms_key_name_1}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-2" { + crypto_key_id = "%{kms_key_name_2}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + auto { + customer_managed_encryption { + kms_key_name = "%{kms_key_name_1}" + } + } + } + depends_on = [ + google_kms_crypto_key_iam_member.kms-secret-binding-1, + google_kms_crypto_key_iam_member.kms-secret-binding-2, + ] +} +`, context) +} + +func testAccSecretMangerSecret_automaticCmekUpdate2(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + project_id = "%{pid}" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-1" { + crypto_key_id = "%{kms_key_name_1}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_kms_crypto_key_iam_member" "kms-secret-binding-2" { + crypto_key_id = "%{kms_key_name_2}" + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = 
"serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" +} +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + replication { + auto { + customer_managed_encryption { + kms_key_name = "%{kms_key_name_2}" + } + } + } + depends_on = [ + google_kms_crypto_key_iam_member.kms-secret-binding-1, + google_kms_crypto_key_iam_member.kms-secret-binding-2, + ] +} +`, context) +} + +func testAccSecretManagerSecret_withoutRotationPeriod(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_pubsub_topic_iam_member" "secrets_manager_access" { + topic = google_pubsub_topic.topic.name + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" + role = "roles/pubsub.publisher" +} + +resource "google_pubsub_topic" "topic" { + name = "tf-test-topic-%{random_suffix}" +} + +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + replication { + user_managed { + replicas { + location = "us-central1" + } + } + } + + depends_on = [ + google_pubsub_topic_iam_member.secrets_manager_access, + ] +} +`, context) +} + +func testAccSecretManagerSecret_rotationPeriodBasic(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_pubsub_topic_iam_member" "secrets_manager_access" { + topic = google_pubsub_topic.topic.name + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" + role = "roles/pubsub.publisher" +} + +resource "google_pubsub_topic" "topic" { + name = "tf-test-topic-%{random_suffix}" +} + +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + topics { + name = 
google_pubsub_topic.topic.id + } + + rotation { + rotation_period = "3600s" + next_rotation_time = "%{timestamp}" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + } + } + + depends_on = [ + google_pubsub_topic_iam_member.secrets_manager_access, + ] +} +`, context) +} + +func testAccSecretManagerSecret_rotationPeriodUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" {} + +resource "google_pubsub_topic_iam_member" "secrets_manager_access" { + topic = google_pubsub_topic.topic.name + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-secretmanager.iam.gserviceaccount.com" + role = "roles/pubsub.publisher" +} + +resource "google_pubsub_topic" "topic" { + name = "tf-test-topic-%{random_suffix}" +} + +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + topics { + name = google_pubsub_topic.topic.id + } + + rotation { + rotation_period = "3700s" + next_rotation_time = "%{timestamp}" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + } + } + + depends_on = [ + google_pubsub_topic_iam_member.secrets_manager_access, + ] +} +`, context) +} + +func testAccSecretManagerSecret_withoutTtl(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} +`, context) +} + +func testAccSecretManagerSecret_ttlUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + 
} + replicas { + location = "us-east1" + } + } + } + + ttl = "7200s" + +} +`, context) +} + +func testAccSecretManagerSecret_withoutVersionDestroyTtl(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } +} +`, context) +} + +func testAccSecretManagerSecret_versionDestroyTtlUpdate(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + + version_destroy_ttl = "86400s" + +} +`, context) +} + +func testAccSecretManagerSecret_expireTime(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + user_managed { + replicas { + location = "us-central1" + } + replicas { + location = "us-east1" + } + } + } + + expire_time = "2122-09-26T10:55:55.163240682Z" + +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_version_test.go b/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_version_test.go new file mode 100644 index 000000000000..3a89b8473a2e --- /dev/null +++ b/mmv1/third_party/terraform/services/secretmanager/go/resource_secret_manager_secret_version_test.go @@ -0,0 +1,97 @@ +package secretmanager_test + +import ( + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccSecretManagerSecretVersion_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecretManagerSecretVersionDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSecretManagerSecretVersion_basic(context), + }, + { + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccSecretManagerSecretVersion_disable(context), + }, + { + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, + // at this point the secret data is disabled and so reading the data on import will + // give an empty string + ImportStateVerifyIgnore: []string{"secret_data"}, + }, + { + Config: testAccSecretManagerSecretVersion_basic(context), + }, + { + ResourceName: "google_secret_manager_secret_version.secret-version-basic", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccSecretManagerSecretVersion_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + secret_id = "tf-test-secret-version-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + auto {} + } +} + +resource "google_secret_manager_secret_version" "secret-version-basic" { + secret = google_secret_manager_secret.secret-basic.name + + secret_data = "my-tf-test-secret%{random_suffix}" + enabled = true +} +`, context) +} + +func testAccSecretManagerSecretVersion_disable(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_secret_manager_secret" "secret-basic" { + 
secret_id = "tf-test-secret-version-%{random_suffix}" + + labels = { + label = "my-label" + } + + replication { + auto {} + } +} + +resource "google_secret_manager_secret_version" "secret-version-basic" { + secret = google_secret_manager_secret.secret-basic.name + + secret_data = "my-tf-test-secret%{random_suffix}" + enabled = false +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/securityscanner/go/resource_security_scanner_scan_config_test.go.tmpl b/mmv1/third_party/terraform/services/securityscanner/go/resource_security_scanner_scan_config_test.go.tmpl new file mode 100644 index 000000000000..2dcca59bf305 --- /dev/null +++ b/mmv1/third_party/terraform/services/securityscanner/go/resource_security_scanner_scan_config_test.go.tmpl @@ -0,0 +1,76 @@ +package securityscanner_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccSecurityScannerScanConfig_scanConfigUpdate(t *testing.T) { + t.Parallel() + + firstAddressSuffix := acctest.RandString(t, 10) + secondAddressSuffix := acctest.RandString(t, 10) + context := map[string]interface{}{ + "random_suffix": firstAddressSuffix, + "random_suffix2": secondAddressSuffix, + "static_address_name": "scanner_static_ip", + "user_agent": "CHROME_LINUX", + "export": "ENABLED", + "max_qps": 10, + } + updateContext := map[string]interface{}{ + "random_suffix": firstAddressSuffix, + "random_suffix2": secondAddressSuffix, + "static_address_name": "scanner_static_ip_update", + "user_agent": "CHROME_ANDROID", + "export": "DISABLED", + "max_qps": 20, + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSecurityScannerScanConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: 
testAccSecurityScannerScanConfig(context), + }, + { + ResourceName: "google_security_scanner_scan_config.terraform-scan-config", + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccSecurityScannerScanConfig(updateContext), + }, + }, + }) +} + +func testAccSecurityScannerScanConfig(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_address" "scanner_static_ip" { + name = "tf-test-scan-static-ip-%{random_suffix}" +} + +resource "google_compute_address" "scanner_static_ip_update" { + name = "tf-test-scan-static-ip-%{random_suffix2}" +} + +resource "google_security_scanner_scan_config" "terraform-scan-config" { + display_name = "terraform-scan-config-%{random_suffix}" + max_qps = %{max_qps} + starting_urls = ["http://${google_compute_address.%{static_address_name}.address}"] + target_platforms = ["COMPUTE"] + user_agent = "%{user_agent}" + export_to_security_command_center = "%{export}" +} +`, context) +} +{{- else }} +// Magic Modules doesn't let us remove files - blank out beta-only common-compile files for now. 
+{{- end }} + diff --git a/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_endpoint_test.go.tmpl b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_endpoint_test.go.tmpl new file mode 100644 index 000000000000..337184b0e14d --- /dev/null +++ b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_endpoint_test.go.tmpl @@ -0,0 +1,105 @@ +package servicedirectory_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccServiceDirectoryEndpoint_serviceDirectoryEndpointUpdateExample(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + location := "us-central1" + testId := fmt.Sprintf("tf-test-example-endpoint%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckServiceDirectoryEndpointDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccServiceDirectoryEndpoint_basic(location, testId), + }, + { + ResourceName: "google_service_directory_endpoint.example", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_endpoint.example", + // {{"{{"}}project{{"}}"}}/{{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}}/{{"{{"}}service_id{{"}}"}}/{{"{{"}}endpoint_id{{"}}"}} + ImportStateId: fmt.Sprintf("%s/%s/%s/%s/%s", project, location, testId, testId, testId), + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_endpoint.example", + // {{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}}/{{"{{"}}service_id{{"}}"}}/{{"{{"}}endpoint_id{{"}}"}} + 
ImportStateId: fmt.Sprintf("%s/%s/%s/%s", location, testId, testId, testId), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccServiceDirectoryEndpoint_update(location, testId), + }, + { + ResourceName: "google_service_directory_endpoint.example", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccServiceDirectoryEndpoint_basic(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" +} + +resource "google_service_directory_service" "example" { + service_id = "%s" + namespace = google_service_directory_namespace.example.id +} + +resource "google_service_directory_endpoint" "example" { + endpoint_id = "%s" + service = google_service_directory_service.example.id +} +`, testId, location, testId, testId) +} + +func testAccServiceDirectoryEndpoint_update(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" +} + +resource "google_service_directory_service" "example" { + service_id = "%s" + namespace = google_service_directory_namespace.example.id +} + +resource "google_service_directory_endpoint" "example" { + endpoint_id = "%s" + service = google_service_directory_service.example.id + + metadata = { + stage = "prod" + region = "us-central1" + } + + address = "1.2.3.4" + port = 5353 +} +`, testId, location, testId, testId) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_namespace_test.go.tmpl b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_namespace_test.go.tmpl new file mode 100644 index 000000000000..4880ece2ac8f --- /dev/null +++ b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_namespace_test.go.tmpl @@ -0,0 +1,83 @@ +package servicedirectory_test +{{- if ne $.TargetVersionName "ga" 
}} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccServiceDirectoryNamespace_serviceDirectoryNamespaceUpdateExample(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + location := "us-central1" + testId := fmt.Sprintf("tf-test-example-namespace%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckServiceDirectoryNamespaceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccServiceDirectoryNamespace_basic(location, testId), + }, + { + ResourceName: "google_service_directory_namespace.example", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_namespace.example", + // {{"{{"}}project{{"}}"}}/{{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}} + ImportStateId: fmt.Sprintf("%s/%s/%s", project, location, testId), + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_namespace.example", + // {{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}} + ImportStateId: fmt.Sprintf("%s/%s", location, testId), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccServiceDirectoryNamespace_update(location, testId), + }, + { + ResourceName: "google_service_directory_namespace.example", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccServiceDirectoryNamespace_basic(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" +} +`, testId, location) +} + +func 
testAccServiceDirectoryNamespace_update(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" + + labels = { + key = "value" + foo = "bar" + } +} +`, testId, location) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_service_test.go.tmpl b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_service_test.go.tmpl new file mode 100644 index 000000000000..50889cc56357 --- /dev/null +++ b/mmv1/third_party/terraform/services/servicedirectory/go/resource_service_directory_service_test.go.tmpl @@ -0,0 +1,92 @@ +package servicedirectory_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccServiceDirectoryService_serviceDirectoryServiceUpdateExample(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + location := "us-central1" + testId := fmt.Sprintf("tf-test-example-service%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckServiceDirectoryServiceDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccServiceDirectoryService_basic(location, testId), + }, + { + ResourceName: "google_service_directory_service.example", + ImportState: true, + ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_service.example", + // {{"{{"}}project{{"}}"}}/{{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}}/{{"{{"}}service_id{{"}}"}} + ImportStateId: fmt.Sprintf("%s/%s/%s/%s", project, location, testId, testId), + ImportState: true, + 
ImportStateVerify: true, + }, + { + ResourceName: "google_service_directory_service.example", + // {{"{{"}}location{{"}}"}}/{{"{{"}}namespace_id{{"}}"}}/{{"{{"}}service_id{{"}}"}} + ImportStateId: fmt.Sprintf("%s/%s/%s", location, testId, testId), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccServiceDirectoryService_update(location, testId), + }, + { + ResourceName: "google_service_directory_service.example", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccServiceDirectoryService_basic(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" +} + +resource "google_service_directory_service" "example" { + service_id = "%s" + namespace = google_service_directory_namespace.example.id +} +`, testId, location, testId) +} + +func testAccServiceDirectoryService_update(location, testId string) string { + return fmt.Sprintf(` +resource "google_service_directory_namespace" "example" { + namespace_id = "%s" + location = "%s" +} + +resource "google_service_directory_service" "example" { + service_id = "%s" + namespace = google_service_directory_namespace.example.id + + metadata = { + stage = "prod" + region = "us-central1" + } +} +`, testId, location, testId) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/serviceusage/go/resource_service_usage_consumer_quota_override_test.go.tmpl b/mmv1/third_party/terraform/services/serviceusage/go/resource_service_usage_consumer_quota_override_test.go.tmpl new file mode 100644 index 000000000000..3edd4118ea91 --- /dev/null +++ b/mmv1/third_party/terraform/services/serviceusage/go/resource_service_usage_consumer_quota_override_test.go.tmpl @@ -0,0 +1,55 @@ +package serviceusage_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "regexp" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + 
"github.com/hashicorp/terraform-provider-google/google/envvar" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccServiceUsageConsumerQuotaOverride_consumerQuotaOverrideCustomIncorrectLimitFormat(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "org_id": envvar.GetTestOrgFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckServiceUsageConsumerQuotaOverrideDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccServiceUsageConsumerQuotaOverride_consumerQuotaOverrideCustomIncorrectLimitFormat(context), + ExpectError: regexp.MustCompile("No quota limit with limitId"), + }, + }, + }) +} + +func testAccServiceUsageConsumerQuotaOverride_consumerQuotaOverrideCustomIncorrectLimitFormat(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "my_project" { + provider = google-beta + name = "tf-test-project" + project_id = "quota%{random_suffix}" + org_id = "%{org_id}" +} + +resource "google_service_usage_consumer_quota_override" "override" { + provider = google-beta + project = google_project.my_project.project_id + service = urlencode("bigquery.googleapis.com") + metric = urlencode("bigquery.googleapis.com/quota/query/usage") + limit = urlencode("1/d/{project}/{user}") # Incorrect format for the API the provider uses, correct format for the gcloud CLI + override_value = "1" + force = true +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/spanner/go/resource_spanner_database_test.go.tmpl b/mmv1/third_party/terraform/services/spanner/go/resource_spanner_database_test.go.tmpl new file mode 100644 index 000000000000..7fc2cecbf66e --- /dev/null +++ b/mmv1/third_party/terraform/services/spanner/go/resource_spanner_database_test.go.tmpl @@ -0,0 
+1,608 @@ +package spanner_test + +import ( + "fmt" + "regexp" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccSpannerDatabase_basic(t *testing.T) { + t.Parallel() + + project := envvar.GetTestProjectFromEnv() + rnd := acctest.RandString(t, 10) + instanceName := fmt.Sprintf("tf-test-%s", rnd) + databaseName := fmt.Sprintf("tfgen_%s", rnd) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_virtualUpdate(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "1h"), // default set by API + ), + }, + { + Config: testAccSpannerDatabase_basic(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "1h"), // default set by API + ), + }, + { + // Test import with default Terraform ID + ResourceName: "google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + { + Config: testAccSpannerDatabase_basicUpdate(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "2d"), + ), + }, + { + // Test import with default Terraform ID 
+ ResourceName: "google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + { + ResourceName: "google_spanner_database.basic", + ImportStateId: fmt.Sprintf("projects/%s/instances/%s/databases/%s", project, instanceName, databaseName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + { + ResourceName: "google_spanner_database.basic", + ImportStateId: fmt.Sprintf("instances/%s/databases/%s", instanceName, databaseName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + { + ResourceName: "google_spanner_database.basic", + ImportStateId: fmt.Sprintf("%s/%s", instanceName, databaseName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + }, + }) +} + +func testAccSpannerDatabase_basic(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_basicUpdate(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + version_retention_period = "2d" # increase from default 1h + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT 
NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + "CREATE TABLE t3 (t3 INT64 NOT NULL,) PRIMARY KEY(t3)", + "CREATE TABLE t4 (t4 INT64 NOT NULL,) PRIMARY KEY(t4)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_virtualUpdate(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + ] + deletion_protection = true +} +`, instanceName, instanceName, databaseName) +} + +func TestAccSpannerDatabase_postgres(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + instanceName := fmt.Sprintf("tf-test-%s", rnd) + databaseName := fmt.Sprintf("tfgen_%s", rnd) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_postgres(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic_spangres", "state"), + ), + }, + { + // Test import with default Terraform ID + ResourceName: "google_spanner_database.basic_spangres", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + { + Config: testAccSpannerDatabase_postgresUpdate(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic_spangres", "state"), + ), + }, + { + // Test import with 
default Terraform ID + ResourceName: "google_spanner_database.basic_spangres", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + }, + }) +} + +func testAccSpannerDatabase_postgres(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic_spangres" { + instance = google_spanner_instance.basic.name + name = "%s-spangres" + database_dialect = "POSTGRESQL" + // Confirm that DDL can be run at creation time for POSTGRESQL + version_retention_period = "2h" + ddl = [ + "CREATE TABLE t1 (t1 bigint NOT NULL PRIMARY KEY)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_postgresUpdate(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic_spangres" { + instance = google_spanner_instance.basic.name + name = "%s-spangres" + database_dialect = "POSTGRESQL" + version_retention_period = "4d" + ddl = [ + "CREATE TABLE t2 (t2 bigint NOT NULL PRIMARY KEY)", + "CREATE TABLE t3 (t3 bigint NOT NULL PRIMARY KEY)", + "CREATE TABLE t4 (t4 bigint NOT NULL PRIMARY KEY)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func TestAccSpannerDatabase_versionRetentionPeriod(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + instanceName := fmt.Sprintf("tf-test-%s", rnd) + databaseName := fmt.Sprintf("tfgen_%s", rnd) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: 
testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + // Test creating a database with `version_retention_period` set + Config: testAccSpannerDatabase_versionRetentionPeriod(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "2h"), + ), + }, + { + // Test removing `version_retention_period` and setting retention period to a new value with a DDL statement in `ddl` + Config: testAccSpannerDatabase_versionRetentionPeriodUpdate1(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "4h"), + ), + }, + { + // Test that adding `version_retention_period` controls retention time, regardless of any previous statements in `ddl` + Config: testAccSpannerDatabase_versionRetentionPeriodUpdate2(instanceName, databaseName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "2h"), + ), + }, + { + // Test that changing the retention value via DDL when `version_retention_period` is set: + // - changes the value (from 2h to 8h) + // - is unstable; non-empty plan afterwards due to conflict + Config: testAccSpannerDatabase_versionRetentionPeriodUpdate3(instanceName, databaseName), + ExpectNonEmptyPlan: true, // is unstable + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "8h"), + ), + }, + { + // Test that when the above config is reapplied: + // - changes the value 
(reverts to set value of `version_retention_period`, 2h) + // - is stable; no further conflict + Config: testAccSpannerDatabase_versionRetentionPeriodUpdate3(instanceName, databaseName), //same as previous step + ExpectNonEmptyPlan: false, // is stable + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttrSet("google_spanner_database.basic", "state"), + resource.TestCheckResourceAttr("google_spanner_database.basic", "version_retention_period", "2h"), + ), + }, + }, + }) +} + +func testAccSpannerDatabase_versionRetentionPeriod(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + version_retention_period = "2h" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_versionRetentionPeriodUpdate1(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + // Change 1/2 : deleted version_retention_period argument + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "ALTER DATABASE %s SET OPTIONS (version_retention_period=\"4h\")", // Change 2/2 : set retention with new DDL + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName, databaseName) +} + +func testAccSpannerDatabase_versionRetentionPeriodUpdate2(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = 
"regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + version_retention_period = "2h" // Change : added version_retention_period argument + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "ALTER DATABASE %s SET OPTIONS (version_retention_period=\"4h\")", + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName, databaseName) +} + +func testAccSpannerDatabase_versionRetentionPeriodUpdate3(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + version_retention_period = "2h" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "ALTER DATABASE %s SET OPTIONS (version_retention_period=\"4h\")", + "ALTER DATABASE %s SET OPTIONS (version_retention_period=\"8h\")", // Change : set retention with new DDL + ] + deletion_protection = false +} +`, instanceName, instanceName, databaseName, databaseName, databaseName) +} + +func TestAccSpannerDatabase_enableDropProtection(t *testing.T) { + t.Parallel() + + rnd := acctest.RandString(t, 10) + instanceName := fmt.Sprintf("tf-test-%s", rnd) + databaseName := fmt.Sprintf("tfgen_%s", rnd) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_enableDropProtection(instanceName, databaseName), + }, + { + ResourceName: "google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", 
"deletion_protection"}, + }, + { + Config: testAccSpannerDatabase_enableDropProtectionUpdate(instanceName, databaseName), + }, + { + ResourceName: "google_spanner_database.basic", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + }, + }) +} + +func testAccSpannerDatabase_enableDropProtection(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + enable_drop_protection = true + deletion_protection = false + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] +} +`, instanceName, instanceName, databaseName) +} + +func testAccSpannerDatabase_enableDropProtectionUpdate(instanceName, databaseName string) string { + return fmt.Sprintf(` +resource "google_spanner_instance" "basic" { + name = "%s" + config = "regional-us-central1" + display_name = "%s-display" + num_nodes = 1 +} + +resource "google_spanner_database" "basic" { + instance = google_spanner_instance.basic.name + name = "%s" + enable_drop_protection = false + deletion_protection = false + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + ] +} +`, instanceName, instanceName, databaseName) +} + +func TestAccSpannerDatabase_deletionProtection(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_deletionProtection(context), + }, + { + ResourceName: 
"google_spanner_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "instance", "deletion_protection"}, + }, + { + Config: testAccSpannerDatabase_deletionProtection(context), + Destroy: true, + ExpectError: regexp.MustCompile("deletion_protection"), + }, + { + Config: testAccSpannerDatabase_spannerDatabaseBasicExample(context), + }, + }, + }) +} + +func testAccSpannerDatabase_deletionProtection(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_spanner_instance" "main" { + config = "regional-europe-west1" + display_name = "main-instance" + num_nodes = 1 +} + +resource "google_spanner_database" "database" { + instance = google_spanner_instance.main.name + name = "tf-test-my-database%{random_suffix}" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + ] +} +`, context) +} + +{{ if ne $.TargetVersionName `ga` -}} +{{/* Field is not beta, but google_project_service_identity dependency is */ -}} +func TestAccSpannerDatabase_cmek(t *testing.T) { + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckSpannerDatabaseDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccSpannerDatabase_cmek(context), + }, + { + ResourceName: "google_spanner_database.database", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"ddl", "deletion_protection"}, + }, + }, + }) +} + +func testAccSpannerDatabase_cmek(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_spanner_instance" "main" { + provider = google-beta + config = "regional-europe-west1" + display_name = "main-instance1" + 
num_nodes = 1 +} + +resource "google_spanner_database" "database" { + provider = google-beta + instance = google_spanner_instance.main.name + name = "tf-test-cmek-db%{random_suffix}" + ddl = [ + "CREATE TABLE t1 (t1 INT64 NOT NULL,) PRIMARY KEY(t1)", + "CREATE TABLE t2 (t2 INT64 NOT NULL,) PRIMARY KEY(t2)", + ] + + encryption_config { + kms_key_name = google_kms_crypto_key.example-key.id + } + + deletion_protection = false + + depends_on = [google_kms_crypto_key_iam_member.crypto-key-binding] +} + +resource "google_kms_key_ring" "keyring" { + provider = google-beta + name = "tf-test-ring%{random_suffix}" + location = "europe-west1" +} + +resource "google_kms_crypto_key" "example-key" { + provider = google-beta + name = "tf-test-key%{random_suffix}" + key_ring = google_kms_key_ring.keyring.id + rotation_period = "100000s" +} + +resource "google_kms_crypto_key_iam_member" "crypto-key-binding" { + provider = google-beta + crypto_key_id = google_kms_crypto_key.example-key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + + member = "serviceAccount:${google_project_service_identity.ck_sa.email}" +} + +data "google_project" "project" { + provider = google-beta +} + +resource "google_project_service_identity" "ck_sa" { + provider = google-beta + project = data.google_project.project.project_id + service = "spanner.googleapis.com" +} + +`, context) +} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket.go b/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket.go new file mode 100644 index 000000000000..905f4da3d07e --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket.go @@ -0,0 +1,1903 @@ +package storage + +import ( + "bytes" + "context" + "errors" + "fmt" + "log" + "math" + "regexp" + "runtime" + "strconv" + "strings" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg 
// ResourceStorageBucket defines the google_storage_bucket Terraform resource:
// its full schema, CRUD entry points, import support, v0->v1 state upgrade,
// and customized diff behavior (a locked retention policy forces recreation,
// and provider default labels are merged into the plan).
func ResourceStorageBucket() *schema.Resource {
	return &schema.Resource{
		Create: resourceStorageBucketCreate,
		Read:   resourceStorageBucketRead,
		Update: resourceStorageBucketUpdate,
		Delete: resourceStorageBucketDelete,
		Importer: &schema.ResourceImporter{
			State: resourceStorageBucketStateImporter,
		},
		CustomizeDiff: customdiff.All(
			// Unlocking a locked retention policy is not possible server-side,
			// so that transition must recreate the bucket (see isPolicyLocked).
			customdiff.ForceNewIfChange("retention_policy.0.is_locked", isPolicyLocked),
			tpgresource.SetLabelsDiff,
		),

		Timeouts: &schema.ResourceTimeout{
			Create: schema.DefaultTimeout(10 * time.Minute),
			Update: schema.DefaultTimeout(4 * time.Minute),
			Read:   schema.DefaultTimeout(4 * time.Minute),
		},

		SchemaVersion: 1,
		StateUpgraders: []schema.StateUpgrader{
			{
				Type:    resourceStorageBucketV0().CoreConfigSchema().ImpliedType(),
				Upgrade: ResourceStorageBucketStateUpgradeV0,
				Version: 0,
			},
		},

		Schema: map[string]*schema.Schema{
			"name": {
				Type:         schema.TypeString,
				Required:     true,
				ForceNew:     true,
				Description:  `The name of the bucket.`,
				ValidateFunc: verify.ValidateGCSName,
			},

			"encryption": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"default_kms_key_name": {
							Type:     schema.TypeString,
							Required: true,
							Description: `A Cloud KMS key that will be used to encrypt objects inserted into this bucket, if no encryption method is specified.
You must pay attention to whether the crypto key is available in the location that this bucket is created in. See the docs for more details.`,
						},
					},
				},
				Description: `The bucket's encryption configuration.`,
			},

			"requester_pays": {
				Type:        schema.TypeBool,
				Optional:    true,
				Description: `Enables Requester Pays on a storage bucket.`,
			},

			"force_destroy": {
				Type:        schema.TypeBool,
				Optional:    true,
				Default:     false,
				Description: `When deleting a bucket, this boolean option will delete all contained objects. If you try to delete a bucket that contains objects, Terraform will fail that run.`,
			},

			"labels": {
				Type:         schema.TypeMap,
				ValidateFunc: labelKeyValidator,
				Optional:     true,
				Elem:         &schema.Schema{Type: schema.TypeString},
				Description:  `A set of key/value label pairs to assign to the bucket.`,
			},

			"terraform_labels": {
				Type:        schema.TypeMap,
				Computed:    true,
				Description: `The combination of labels configured directly on the resource and default labels configured on the provider.`,
				Elem:        &schema.Schema{Type: schema.TypeString},
			},

			"effective_labels": {
				Type:        schema.TypeMap,
				Computed:    true,
				Description: `All of labels (key/value pairs) present on the resource in GCP, including the labels configured through Terraform, other clients and services.`,
				Elem:        &schema.Schema{Type: schema.TypeString},
			},

			"location": {
				Type:     schema.TypeString,
				Required: true,
				ForceNew: true,
				// Locations are case-insensitive in the API; normalize to upper
				// case so state never diffs against the canonical form.
				StateFunc: func(s interface{}) string {
					return strings.ToUpper(s.(string))
				},
				Description: `The Google Cloud Storage location`,
			},

			"project": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				ForceNew: true,
				Description: `The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.`,
			},

			"project_number": {
				Type:        schema.TypeInt,
				Computed:    true,
				Description: `The project number of the project in which the resource belongs.`,
			},

			"self_link": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: `The URI of the created resource.`,
			},

			"url": {
				Type:        schema.TypeString,
				Computed:    true,
				Description: `The base URL of the bucket, in the format gs://.`,
			},

			"storage_class": {
				Type:        schema.TypeString,
				Optional:    true,
				Default:     "STANDARD",
				Description: `The Storage Class of the new bucket. Supported values include: STANDARD, MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`,
			},

			"lifecycle_rule": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 100,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"action": {
							Type:     schema.TypeSet,
							Required: true,
							MinItems: 1,
							MaxItems: 1,
							Set:      resourceGCSBucketLifecycleRuleActionHash,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"type": {
										Type:        schema.TypeString,
										Required:    true,
										Description: `The type of the action of this Lifecycle Rule. Supported values include: Delete, SetStorageClass and AbortIncompleteMultipartUpload.`,
									},
									"storage_class": {
										Type:        schema.TypeString,
										Optional:    true,
										Description: `The target Storage Class of objects affected by this Lifecycle Rule. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE.`,
									},
								},
							},
							Description: `The Lifecycle Rule's action configuration.
A single block of this type is supported.`,
						},
						"condition": {
							Type:     schema.TypeSet,
							Required: true,
							MinItems: 1,
							MaxItems: 1,
							Set:      resourceGCSBucketLifecycleRuleConditionHash,
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"age": {
										Type:        schema.TypeInt,
										Optional:    true,
										Description: `Minimum age of an object in days to satisfy this condition.`,
									},
									"created_before": {
										Type:        schema.TypeString,
										Optional:    true,
										Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`,
									},
									"custom_time_before": {
										Type:        schema.TypeString,
										Optional:    true,
										Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`,
									},
									"days_since_custom_time": {
										Type:        schema.TypeInt,
										Optional:    true,
										Description: `Number of days elapsed since the user-specified timestamp set on an object.`,
									},
									"days_since_noncurrent_time": {
										Type:     schema.TypeInt,
										Optional: true,
										Description: `Number of days elapsed since the noncurrent timestamp of an object. This
condition is relevant only for versioned objects.`,
									},
									"noncurrent_time_before": {
										Type:        schema.TypeString,
										Optional:    true,
										Description: `Creation date of an object in RFC 3339 (e.g. 2017-06-13) to satisfy this condition.`,
									},
									"no_age": {
										Type:        schema.TypeBool,
										Optional:    true,
										Description: `While set true, age value will be omitted.Required to set true when age is unset in the config file.`,
									},
									"with_state": {
										Type:         schema.TypeString,
										Computed:     true,
										Optional:     true,
										ValidateFunc: validation.StringInSlice([]string{"LIVE", "ARCHIVED", "ANY", ""}, false),
										Description: `Match to live and/or archived objects. Unversioned buckets have only live objects.
Supported values include: "LIVE", "ARCHIVED", "ANY".`,
									},
									"matches_storage_class": {
										Type:        schema.TypeList,
										Optional:    true,
										Elem:        &schema.Schema{Type: schema.TypeString},
										Description: `Storage Class of objects to satisfy this condition. Supported values include: MULTI_REGIONAL, REGIONAL, NEARLINE, COLDLINE, ARCHIVE, STANDARD, DURABLE_REDUCED_AVAILABILITY.`,
									},
									"num_newer_versions": {
										Type:        schema.TypeInt,
										Optional:    true,
										Description: `Relevant only for versioned objects. The number of newer versions of an object to satisfy this condition.`,
									},
									"matches_prefix": {
										Type:        schema.TypeList,
										Optional:    true,
										Elem:        &schema.Schema{Type: schema.TypeString},
										Description: `One or more matching name prefixes to satisfy this condition.`,
									},
									"matches_suffix": {
										Type:        schema.TypeList,
										Optional:    true,
										Elem:        &schema.Schema{Type: schema.TypeString},
										Description: `One or more matching name suffixes to satisfy this condition.`,
									},
									"send_days_since_noncurrent_time_if_zero": {
										Type:        schema.TypeBool,
										Optional:    true,
										Description: `While set true, days_since_noncurrent_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_noncurrent_time field. It can be used alone or together with days_since_noncurrent_time.`,
									},
									"send_days_since_custom_time_if_zero": {
										Type:        schema.TypeBool,
										Optional:    true,
										Description: `While set true, days_since_custom_time value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the days_since_custom_time field. It can be used alone or together with days_since_custom_time.`,
									},
									"send_num_newer_versions_if_zero": {
										Type:     schema.TypeBool,
										Optional: true,
										Description: `While set true, num_newer_versions value will be sent in the request even for zero value of the field. This field is only useful for setting 0 value to the num_newer_versions field.
It can be used alone or together with num_newer_versions.`,
									},
								},
							},
							Description: `The Lifecycle Rule's condition configuration.`,
						},
					},
				},
				Description: `The bucket's Lifecycle Rules configuration.`,
			},

			"enable_object_retention": {
				Type:        schema.TypeBool,
				Optional:    true,
				ForceNew:    true,
				Description: `Enables each object in the bucket to have its own retention policy, which prevents deletion until stored for a specific length of time.`,
			},

			"versioning": {
				Type:     schema.TypeList,
				Optional: true,
				Computed: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"enabled": {
							Type:        schema.TypeBool,
							Required:    true,
							Description: `While set to true, versioning is fully enabled for this bucket.`,
						},
					},
				},
				Description: `The bucket's Versioning configuration.`,
			},

			"autoclass": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"enabled": {
							Type:        schema.TypeBool,
							Required:    true,
							Description: `While set to true, autoclass automatically transitions objects in your bucket to appropriate storage classes based on each object's access pattern.`,
						},
						"terminal_storage_class": {
							Type:     schema.TypeString,
							Optional: true,
							Computed: true,
							Description: `The storage class that objects in the bucket eventually transition to if they are not read for a certain length of time.
Supported values include: NEARLINE, ARCHIVE.`,
						},
					},
				},
				Description: `The bucket's autoclass configuration.`,
				// Suppress the diff when an `autoclass { enabled = false }` block
				// is added or removed: both states mean "autoclass off" server-side.
				DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
					_, n := d.GetChange(strings.TrimSuffix(k, ".#"))
					if !strings.HasSuffix(k, ".#") {
						return false
					}
					var l []interface{}
					if new == "1" && old == "0" {
						l = n.([]interface{})
						contents, ok := l[0].(map[string]interface{})
						if !ok {
							return false
						}
						if contents["enabled"] == false {
							return true
						}
					}
					if new == "0" && old == "1" {
						n := d.Get(strings.TrimSuffix(k, ".#"))
						l = n.([]interface{})
						contents := l[0].(map[string]interface{})
						if contents["enabled"] == false {
							return true
						}
					}
					return false
				},
			},
			"website": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Computed: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"main_page_suffix": {
							Type:         schema.TypeString,
							Optional:     true,
							AtLeastOneOf: []string{"website.0.not_found_page", "website.0.main_page_suffix"},
							Description:  `Behaves as the bucket's directory index where missing objects are treated as potential directories.`,
							// Removing the attribute leaves the server-side value in
							// place, so a remote value with an empty config is not a diff.
							DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
								return old != "" && new == ""
							},
						},
						"not_found_page": {
							Type:         schema.TypeString,
							Optional:     true,
							AtLeastOneOf: []string{"website.0.main_page_suffix", "website.0.not_found_page"},
							Description:  `The custom object to return when a requested resource is not found.`,
							DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool {
								return old != "" && new == ""
							},
						},
					},
				},
				Description: `Configuration if the bucket acts as a website.`,
			},

			"retention_policy": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"is_locked": {
							Type:     schema.TypeBool,
							Optional: true,
							Default:  false,
							Description: `If set to true, the bucket will be locked
and permanently restrict edits to the bucket's retention policy. Caution: Locking a bucket is an irreversible action.`,
						},
						"retention_period": {
							Type:         schema.TypeInt,
							Required:     true,
							ValidateFunc: validation.IntBetween(1, math.MaxInt32),
							Description:  `The period of time, in seconds, that objects in the bucket must be retained and cannot be deleted, overwritten, or archived. The value must be less than 3,155,760,000 seconds.`,
						},
					},
				},
				Description: `Configuration of the bucket's data retention policy for how long objects in the bucket should be retained.`,
			},

			"cors": {
				Type:     schema.TypeList,
				Optional: true,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"origin": {
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
							Description: `The list of Origins eligible to receive CORS response headers. Note: "*" is permitted in the list of origins, and means "any Origin".`,
						},
						"method": {
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
							Description: `The list of HTTP methods on which to include CORS response headers, (GET, OPTIONS, POST, etc) Note: "*" is permitted in the list of methods, and means "any method".`,
						},
						"response_header": {
							Type:     schema.TypeList,
							Optional: true,
							Elem: &schema.Schema{
								Type: schema.TypeString,
							},
							Description: `The list of HTTP headers other than the simple response headers to give permission for the user-agent to share across domains.`,
						},
						"max_age_seconds": {
							Type:        schema.TypeInt,
							Optional:    true,
							Description: `The value, in seconds, to return in the Access-Control-Max-Age header used in preflight responses.`,
						},
					},
				},
				Description: `The bucket's Cross-Origin Resource Sharing (CORS) configuration.`,
			},

			"default_event_based_hold": {
				Type:     schema.TypeBool,
				Optional: true,
				Description: `Whether or not to automatically apply an eventBasedHold to new objects
added to the bucket.`,
			},

			"logging": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"log_bucket": {
							Type:        schema.TypeString,
							Required:    true,
							Description: `The bucket that will receive log objects.`,
						},
						"log_object_prefix": {
							Type:        schema.TypeString,
							Optional:    true,
							Computed:    true,
							Description: `The object prefix for log objects. If it's not provided, by default Google Cloud Storage sets this to this bucket's name.`,
						},
					},
				},
				Description: `The bucket's Access & Storage Logs configuration.`,
			},
			"uniform_bucket_level_access": {
				Type:        schema.TypeBool,
				Optional:    true,
				Computed:    true,
				Description: `Enables uniform bucket-level access on a bucket.`,
			},
			"custom_placement_config": {
				Type:     schema.TypeList,
				Optional: true,
				MaxItems: 1,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"data_locations": {
							Type:     schema.TypeSet,
							Required: true,
							ForceNew: true,
							MaxItems: 2,
							MinItems: 2,
							Elem: &schema.Schema{
								Type: schema.TypeString,
								// Normalize to upper case like "location" above.
								StateFunc: func(s interface{}) string {
									return strings.ToUpper(s.(string))
								},
							},
							Description: `The list of individual regions that comprise a dual-region bucket. See the docs for a list of acceptable regions. Note: If any of the data_locations changes, it will recreate the bucket.`,
						},
					},
				},
				Description: `The bucket's custom location configuration, which specifies the individual regions that comprise a dual-region bucket. If the bucket is designated a single or multi-region, the parameters are empty.`,
			},
			"rpo": {
				Type:     schema.TypeString,
				Optional: true,
				Computed: true,
				Description: `Specifies the RPO setting of bucket. If set 'ASYNC_TURBO', The Turbo Replication will be enabled for the dual-region bucket. Value 'DEFAULT' will set RPO setting to default.
Turbo Replication is only for buckets in dual-regions.See the docs for more details.`,
			},
			"public_access_prevention": {
				Type:        schema.TypeString,
				Optional:    true,
				Computed:    true,
				Description: `Prevents public access to a bucket.`,
			},
			"soft_delete_policy": {
				Type:        schema.TypeList,
				MaxItems:    1,
				Optional:    true,
				Computed:    true,
				Description: `The bucket's soft delete policy, which defines the period of time that soft-deleted objects will be retained, and cannot be permanently deleted. If it is not provided, by default Google Cloud Storage sets this to default soft delete policy`,
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"retention_duration_seconds": {
							Type:        schema.TypeInt,
							Default:     604800,
							Optional:    true,
							Description: `The duration in seconds that soft-deleted objects in the bucket will be retained and cannot be permanently deleted. Default value is 604800.`,
						},
						"effective_time": {
							Type:        schema.TypeString,
							Computed:    true,
							Description: `Server-determined value that indicates the time from which the policy, or one with a greater retention, was effective. This value is in RFC 3339 format.`,
						},
					},
				},
			},
		},
		// Decode numbers from the API as json.Number to avoid float64
		// precision loss on large integer fields.
		UseJSONNumber: true,
	}
}

// Label-key prefix reserved for labels that Dataplex attaches server-side.
const resourceDataplexGoogleLabelPrefix = "goog-dataplex"
const resourceDataplexGoogleProvidedLabelPrefix = "labels." + resourceDataplexGoogleLabelPrefix

// Valid GCS bucket label keys: 1-63 chars of lowercase letters, digits, _ or -.
var labelKeyRegex = regexp.MustCompile(`^[a-z0-9_-]{1,63}$`)

// labelKeyValidator validates every key of the "labels" map against
// labelKeyRegex and reports one error per invalid key.
func labelKeyValidator(val interface{}, key string) (warns []string, errs []error) {
	if val == nil {
		return
	}

	m := val.(map[string]interface{})
	for k := range m {
		if !labelKeyRegex.MatchString(k) {
			errs = append(errs, fmt.Errorf("%q is an invalid label key. See https://cloud.google.com/storage/docs/tags-and-labels#bucket-labels", k))
		}
	}
	return
}
See https://cloud.google.com/storage/docs/tags-and-labels#bucket-labels", k)) + } + } + return +} + +func resourceDataplexLabelDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + if strings.HasPrefix(k, resourceDataplexGoogleProvidedLabelPrefix) && new == "" { + return true + } + + // Let diff be determined by labels (above) + if strings.HasPrefix(k, "labels.%") { + return true + } + + // For other keys, don't suppress diff. + return false +} + +// Is the old bucket retention policy locked? +func isPolicyLocked(_ context.Context, old, new, _ interface{}) bool { + if old == nil || new == nil { + return false + } + + // if the old policy is locked, but the new policy is not + if old.(bool) && !new.(bool) { + return true + } + + return false +} + +func resourceStorageBucketCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + // Get the bucket and location + bucket := d.Get("name").(string) + location := d.Get("location").(string) + + // Create a bucket, setting the labels, location and name. 
+ sb := &storage.Bucket{ + Name: bucket, + Labels: tpgresource.ExpandEffectiveLabels(d), + Location: location, + IamConfiguration: expandIamConfiguration(d), + } + + if v, ok := d.GetOk("storage_class"); ok { + sb.StorageClass = v.(string) + } + + lifecycle, err := expandStorageBucketLifecycle(d.Get("lifecycle_rule")) + if err != nil { + return err + } + sb.Lifecycle = lifecycle + + if v, ok := d.GetOk("versioning"); ok { + sb.Versioning = expandBucketVersioning(v) + } + + if v, ok := d.GetOk("autoclass"); ok { + sb.Autoclass = expandBucketAutoclass(v) + } + + if v, ok := d.GetOk("website"); ok { + sb.Website = expandBucketWebsite(v.([]interface{})) + } + + if v, ok := d.GetOk("retention_policy"); ok { + // Not using expandBucketRetentionPolicy() here because `is_locked` cannot be set on creation. + retention_policies := v.([]interface{}) + + if len(retention_policies) > 0 { + sb.RetentionPolicy = &storage.BucketRetentionPolicy{} + + retentionPolicy := retention_policies[0].(map[string]interface{}) + + if v, ok := retentionPolicy["retention_period"]; ok { + sb.RetentionPolicy.RetentionPeriod = int64(v.(int)) + } + } + } + + if v, ok := d.GetOk("default_event_based_hold"); ok { + sb.DefaultEventBasedHold = v.(bool) + } + + if v, ok := d.GetOk("cors"); ok { + sb.Cors = expandCors(v.([]interface{})) + } + + if v, ok := d.GetOk("logging"); ok { + sb.Logging = expandBucketLogging(v.([]interface{})) + } + + if v, ok := d.GetOk("encryption"); ok { + sb.Encryption = expandBucketEncryption(v.([]interface{})) + } + + if v, ok := d.GetOk("requester_pays"); ok { + sb.Billing = &storage.BucketBilling{ + RequesterPays: v.(bool), + } + } + + if v, ok := d.GetOk("custom_placement_config"); ok { + sb.CustomPlacementConfig = expandBucketCustomPlacementConfig(v.([]interface{})) + } + + if v, ok := d.GetOk("rpo"); ok{ + sb.Rpo = v.(string) + } + + if v, ok := d.GetOk("soft_delete_policy"); ok { + sb.SoftDeletePolicy = expandBucketSoftDeletePolicy(v.([]interface{})) + } + + var res 
*storage.Bucket + + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + insertCall := config.NewStorageClient(userAgent).Buckets.Insert(project, sb) + if d.Get("enable_object_retention").(bool) { + insertCall.EnableObjectRetention(true) + } + res, err = insertCall.Do() + return err + }, + Timeout: d.Timeout(schema.TimeoutCreate), + }) + + if err != nil { + fmt.Printf("Error creating bucket %s: %v", bucket, err) + return err + } + + log.Printf("[DEBUG] Created bucket %v at location %v\n\n", res.Name, res.SelfLink) + d.SetId(res.Id) + + // There seems to be some eventual consistency errors in some cases, so we want to check a few times + // to make sure it exists before moving on + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() + return retryErr + }, + Timeout: d.Timeout(schema.TimeoutCreate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket creation")}, + }) + + if err != nil { + return fmt.Errorf("Error reading bucket after creation: %s", err) + } + + // If the retention policy is not already locked, check if it + // needs to be locked. 
// resourceStorageBucketUpdate builds a sparse storage.Bucket containing only
// the attributes that changed in the plan and PATCHes it, using NullFields to
// clear removed blocks and ForceSendFields to send explicit false/zero values.
// After the PATCH it re-GETs the bucket (eventual consistency) and, if
// requested, locks the retention policy in a separate call.
func resourceStorageBucketUpdate(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*transport_tpg.Config)
	userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent)
	if err != nil {
		return err
	}

	sb := &storage.Bucket{}

	if detectLifecycleChange(d) {
		lifecycle, err := expandStorageBucketLifecycle(d.Get("lifecycle_rule"))
		if err != nil {
			return err
		}
		sb.Lifecycle = lifecycle
	}

	if d.HasChange("requester_pays") {
		v := d.Get("requester_pays")
		sb.Billing = &storage.BucketBilling{
			RequesterPays: v.(bool),
			// Force-send so "false" is transmitted rather than omitted.
			ForceSendFields: []string{"RequesterPays"},
		}
	}

	if d.HasChange("versioning") {
		if v, ok := d.GetOk("versioning"); ok {
			sb.Versioning = expandBucketVersioning(v)
		}
	}

	if d.HasChange("autoclass") {
		if v, ok := d.GetOk("autoclass"); ok {
			sb.Autoclass = expandBucketAutoclass(v)
		} else {
			// Block removed from config: explicitly disable autoclass.
			sb.Autoclass = &storage.BucketAutoclass{
				Enabled:         false,
				ForceSendFields: []string{"Enabled"},
			}
		}
	}

	if d.HasChange("website") {
		sb.Website = expandBucketWebsite(d.Get("website"))
	}

	if d.HasChange("retention_policy") {
		if v, ok := d.GetOk("retention_policy"); ok {
			sb.RetentionPolicy = expandBucketRetentionPolicy(v.([]interface{}))
		} else {
			sb.NullFields = append(sb.NullFields, "RetentionPolicy")
		}
	}

	if d.HasChange("cors") {
		if v, ok := d.GetOk("cors"); ok {
			sb.Cors = expandCors(v.([]interface{}))
		} else {
			sb.NullFields = append(sb.NullFields, "Cors")
		}
	}

	if d.HasChange("default_event_based_hold") {
		v := d.Get("default_event_based_hold")
		sb.DefaultEventBasedHold = v.(bool)
		sb.ForceSendFields = append(sb.ForceSendFields, "DefaultEventBasedHold")
	}

	if d.HasChange("logging") {
		if v, ok := d.GetOk("logging"); ok {
			sb.Logging = expandBucketLogging(v.([]interface{}))
		} else {
			sb.NullFields = append(sb.NullFields, "Logging")
		}
	}

	if d.HasChange("encryption") {
		if v, ok := d.GetOk("encryption"); ok {
			sb.Encryption = expandBucketEncryption(v.([]interface{}))
		} else {
			sb.NullFields = append(sb.NullFields, "Encryption")
		}
	}

	if d.HasChange("effective_labels") {
		sb.Labels = tpgresource.ExpandEffectiveLabels(d)
		if len(sb.Labels) == 0 {
			sb.NullFields = append(sb.NullFields, "Labels")
		}

		// To delete a label using PATCH, we have to explicitly set its value
		// to null.
		old, _ := d.GetChange("effective_labels")
		for k := range old.(map[string]interface{}) {
			if _, ok := sb.Labels[k]; !ok {
				sb.NullFields = append(sb.NullFields, fmt.Sprintf("Labels.%s", k))
			}
		}
	}

	if d.HasChange("storage_class") {
		if v, ok := d.GetOk("storage_class"); ok {
			sb.StorageClass = v.(string)
		}
	}

	if d.HasChange("uniform_bucket_level_access") || d.HasChange("public_access_prevention") {
		sb.IamConfiguration = expandIamConfiguration(d)
	}

	if d.HasChange("rpo") {
		if v, ok := d.GetOk("rpo"); ok {
			sb.Rpo = v.(string)
		} else {
			sb.NullFields = append(sb.NullFields, "Rpo")
		}
	}

	if d.HasChange("soft_delete_policy") {
		if v, ok := d.GetOk("soft_delete_policy"); ok {
			sb.SoftDeletePolicy = expandBucketSoftDeletePolicy(v.([]interface{}))
		}
	}

	res, err := config.NewStorageClient(userAgent).Buckets.Patch(d.Get("name").(string), sb).Do()
	if err != nil {
		return err
	}

	// Assign the bucket ID as the resource ID
	if err := d.Set("self_link", res.SelfLink); err != nil {
		return fmt.Errorf("Error setting self_link: %s", err)
	}

	// There seems to be some eventual consistency errors in some cases, so we want to check a few times
	// to make sure it exists before moving on
	err = transport_tpg.Retry(transport_tpg.RetryOptions{
		RetryFunc: func() (operr error) {
			_, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do()
			return retryErr
		},
		Timeout:              d.Timeout(schema.TimeoutUpdate),
		ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket update")},
	})

	if err != nil {
		return fmt.Errorf("Error reading bucket after update: %s", err)
	}

	// Locking is a separate API call that can only happen after the policy
	// itself has been patched onto the bucket.
	if d.HasChange("retention_policy") {
		if v, ok := d.GetOk("retention_policy"); ok {
			retention_policies := v.([]interface{})

			sb.RetentionPolicy = &storage.BucketRetentionPolicy{}

			retentionPolicy := retention_policies[0].(map[string]interface{})

			if locked, ok := retentionPolicy["is_locked"]; ok && locked.(bool) && d.HasChange("retention_policy.0.is_locked") {
				err = lockRetentionPolicy(config.NewStorageClient(userAgent).Buckets, d.Get("name").(string), res.Metageneration)
				if err != nil {
					return err
				}
			}
		}
	}

	log.Printf("[DEBUG] Patched bucket %v at location %v\n\n", res.Name, res.SelfLink)

	d.SetId(res.Id)

	return nil
}
res.SelfLink); err != nil { + return fmt.Errorf("Error setting self_link: %s", err) + } + + // There seems to be some eventual consistency errors in some cases, so we want to check a few times + // to make sure it exists before moving on + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + _, retryErr := config.NewStorageClient(userAgent).Buckets.Get(res.Name).Do() + return retryErr + }, + Timeout: d.Timeout(schema.TimeoutUpdate), + ErrorRetryPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.IsNotFoundRetryableError("bucket update")}, + }) + + if err != nil { + return fmt.Errorf("Error reading bucket after update: %s", err) + } + + if d.HasChange("retention_policy") { + if v, ok := d.GetOk("retention_policy"); ok { + retention_policies := v.([]interface{}) + + sb.RetentionPolicy = &storage.BucketRetentionPolicy{} + + retentionPolicy := retention_policies[0].(map[string]interface{}) + + if locked, ok := retentionPolicy["is_locked"]; ok && locked.(bool) && d.HasChange("retention_policy.0.is_locked") { + err = lockRetentionPolicy(config.NewStorageClient(userAgent).Buckets, d.Get("name").(string), res.Metageneration) + if err != nil { + return err + } + } + } + } + + log.Printf("[DEBUG] Patched bucket %v at location %v\n\n", res.Name, res.SelfLink) + + d.SetId(res.Id) + + return nil +} + +func resourceStorageBucketRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + // Get the bucket and acl + bucket := d.Get("name").(string) + + var res *storage.Bucket + // There seems to be some eventual consistency errors in some cases, so we want to check a few times + // to make sure it exists before moving on + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (operr error) { + var retryErr error + res, retryErr = 
// resourceStorageBucketDelete deletes the bucket. Because GCS refuses to
// delete a non-empty bucket, when force_destroy is set it first lists and
// deletes every object version (in parallel via a worker pool), looping until
// the listing comes back empty or an object deletion fails. Locked retention
// policies are checked up front so we fail fast instead of partially deleting.
func resourceStorageBucketDelete(d *schema.ResourceData, meta interface{}) error {
	config := meta.(*transport_tpg.Config)
	userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent)
	if err != nil {
		return err
	}

	// Get the bucket
	bucket := d.Get("name").(string)

	var listError, deleteObjectError error
	for deleteObjectError == nil {
		res, err := config.NewStorageClient(userAgent).Objects.List(bucket).Versions(true).Do()
		if err != nil {
			log.Printf("Error listing contents of bucket %s: %v", bucket, err)
			// If we can't list the contents, try deleting the bucket anyway in case it's empty
			listError = err
			break
		}

		if len(res.Items) == 0 {
			break // 0 items, bucket empty
		}

		// A locked retention policy means objects within their retention
		// period can never be deleted; bail out before touching anything.
		if d.Get("retention_policy.0.is_locked").(bool) {
			for _, item := range res.Items {
				expiration, err := time.Parse(time.RFC3339, item.RetentionExpirationTime)
				if err != nil {
					return err
				}
				if expiration.After(time.Now()) {
					deleteErr := errors.New("Bucket '" + d.Get("name").(string) + "' contains objects that have not met the retention period yet and cannot be deleted.")
					log.Printf("Error! %s : %s\n\n", bucket, deleteErr)
					return deleteErr
				}
			}
		}

		if !d.Get("force_destroy").(bool) {
			deleteErr := fmt.Errorf("Error trying to delete bucket %s containing objects without `force_destroy` set to true", bucket)
			log.Printf("Error! %s : %s\n\n", bucket, deleteErr)
			return deleteErr
		}
		// GCS requires that a bucket be empty (have no objects or object
		// versions) before it can be deleted.
		log.Printf("[DEBUG] GCS Bucket attempting to forceDestroy\n\n")

		// Create a workerpool for parallel deletion of resources. In the
		// future, it would be great to expose Terraform's global parallelism
		// flag here, but that's currently reserved for core use. Testing
		// shows that NumCPUs-1 is the most performant on average networks.
		//
		// The challenge with making this user-configurable is that the
		// configuration would reside in the Terraform configuration file,
		// decreasing its portability. Ideally we'd want this to connect to
		// Terraform's top-level -parallelism flag, but that's not plumbed nor
		// is it scheduled to be plumbed to individual providers.
		wp := workerpool.New(runtime.NumCPU() - 1)

		for _, object := range res.Items {
			log.Printf("[DEBUG] Found %s", object.Name)
			// Shadow the loop variable so each submitted closure captures its
			// own object (pre-Go-1.22 loop-variable semantics).
			object := object

			wp.Submit(func() {
				log.Printf("[TRACE] Attempting to delete %s", object.Name)
				if err := config.NewStorageClient(userAgent).Objects.Delete(bucket, object.Name).Generation(object.Generation).Do(); err != nil {
					// NOTE(review): deleteObjectError is written from multiple
					// worker goroutines without synchronization — a data race
					// under `-race`; confirm and consider a mutex/atomic.
					deleteObjectError = err
					log.Printf("[ERR] Failed to delete storage object %s: %s", object.Name, err)
				} else {
					log.Printf("[TRACE] Successfully deleted %s", object.Name)
				}
			})
		}

		// Wait for everything to finish.
		wp.StopWait()
	}

	// remove empty bucket
	err = retry.Retry(1*time.Minute, func() *retry.RetryError {
		err := config.NewStorageClient(userAgent).Buckets.Delete(bucket).Do()
		if err == nil {
			return nil
		}
		// 429 (rate limit) is the only retryable failure here.
		if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 429 {
			return retry.RetryableError(gerr)
		}
		return retry.NonRetryableError(err)
	})
	// Surface the more actionable underlying cause when the final delete
	// failed with 409 "not empty" after a list/delete problem above.
	if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "not empty") && listError != nil {
		return fmt.Errorf("could not delete non-empty bucket due to error when listing contents: %v", listError)
	}
	if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Message, "not empty") && deleteObjectError != nil {
		return fmt.Errorf("could not delete non-empty bucket due to error when deleting contents: %v", deleteObjectError)
	}
	if err != nil {
		log.Printf("Error deleting bucket %s: %v", bucket, err)
		return err
	}
	log.Printf("[DEBUG] Deleted bucket %v\n\n", bucket)

	return nil
}

// resourceStorageBucketStateImporter parses the import ID into state.
func resourceStorageBucketStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) {
	// We need to support project/bucket_name and bucket_name formats. This will allow
	// importing a bucket that is in a different project than the provider default.
	// ParseImportID can't be used because having no project will cause an error but it
	// is a valid state as the project_id will be retrieved in READ
	parts := strings.Split(d.Id(), "/")
	if len(parts) == 1 {
		if err := d.Set("name", parts[0]); err != nil {
			return nil, fmt.Errorf("Error setting name: %s", err)
		}
	} else if len(parts) > 1 {
		if err := d.Set("project", parts[0]); err != nil {
			return nil, fmt.Errorf("Error setting project: %s", err)
		}
		if err := d.Set("name", parts[1]); err != nil {
			return nil, fmt.Errorf("Error setting name: %s", err)
		}
	}

	// Imported buckets default to the safe behavior of not destroying objects.
	if err := d.Set("force_destroy", false); err != nil {
		return nil, fmt.Errorf("Error setting force_destroy: %s", err)
	}
	return []*schema.ResourceData{d}, nil
}
+ // ParseImportID can't be used because having no project will cause an error but it + // is a valid state as the project_id will be retrieved in READ + parts := strings.Split(d.Id(), "/") + if len(parts) == 1 { + if err := d.Set("name", parts[0]); err != nil { + return nil, fmt.Errorf("Error setting name: %s", err) + } + } else if len(parts) > 1 { + if err := d.Set("project", parts[0]); err != nil { + return nil, fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("name", parts[1]); err != nil { + return nil, fmt.Errorf("Error setting name: %s", err) + } + } + + if err := d.Set("force_destroy", false); err != nil { + return nil, fmt.Errorf("Error setting force_destroy: %s", err) + } + return []*schema.ResourceData{d}, nil +} + +func expandCors(configured []interface{}) []*storage.BucketCors { + if len(configured) == 0 { + return nil + } + corsRules := make([]*storage.BucketCors, 0, len(configured)) + for _, raw := range configured { + data := raw.(map[string]interface{}) + corsRule := storage.BucketCors{ + Origin: tpgresource.ConvertStringArr(data["origin"].([]interface{})), + Method: tpgresource.ConvertStringArr(data["method"].([]interface{})), + ResponseHeader: tpgresource.ConvertStringArr(data["response_header"].([]interface{})), + MaxAgeSeconds: int64(data["max_age_seconds"].(int)), + } + + corsRules = append(corsRules, &corsRule) + } + return corsRules +} + +func flattenCors(corsRules []*storage.BucketCors) []map[string]interface{} { + corsRulesSchema := make([]map[string]interface{}, 0, len(corsRules)) + for _, corsRule := range corsRules { + data := map[string]interface{}{ + "origin": corsRule.Origin, + "method": corsRule.Method, + "response_header": corsRule.ResponseHeader, + "max_age_seconds": corsRule.MaxAgeSeconds, + } + + corsRulesSchema = append(corsRulesSchema, data) + } + return corsRulesSchema +} + +func expandBucketEncryption(configured interface{}) *storage.BucketEncryption { + encs := configured.([]interface{}) + if len(encs) == 
0 || encs[0] == nil { + return nil + } + enc := encs[0].(map[string]interface{}) + keyname := enc["default_kms_key_name"] + if keyname == nil || keyname.(string) == "" { + return nil + } + bucketenc := &storage.BucketEncryption{ + DefaultKmsKeyName: keyname.(string), + } + return bucketenc +} + +func flattenBucketEncryption(enc *storage.BucketEncryption) []map[string]interface{} { + encryption := make([]map[string]interface{}, 0, 1) + + if enc == nil { + return encryption + } + + encryption = append(encryption, map[string]interface{}{ + "default_kms_key_name": enc.DefaultKmsKeyName, + }) + + return encryption +} + +func expandBucketCustomPlacementConfig(configured interface{}) *storage.BucketCustomPlacementConfig { + cfcs := configured.([]interface{}) + if len(cfcs) == 0 || cfcs[0] == nil { + return nil + } + cfc := cfcs[0].(map[string]interface{}) + bucketcfc := &storage.BucketCustomPlacementConfig{ + DataLocations: expandBucketDataLocations(cfc["data_locations"]), + } + return bucketcfc +} + +func flattenBucketCustomPlacementConfig(cfc *storage.BucketCustomPlacementConfig) []map[string]interface{} { + customPlacementConfig := make([]map[string]interface{}, 0, 1) + + if cfc == nil { + return customPlacementConfig + } + + customPlacementConfig = append(customPlacementConfig, map[string]interface{}{ + "data_locations": cfc.DataLocations, + }) + + return customPlacementConfig +} + +func expandBucketDataLocations(configured interface{}) []string { + l := configured.(*schema.Set).List() + + // Since we only want uppercase values to prevent unnecessary diffs, we can do a comparison + // to determine whether or not to include the value as part of the request. + + // This extra check comes from the limitations of both DiffStateFunc and StateFunc towards types of Sets,Lists, and Maps. 
+ req := make([]string, 0, len(l)) + for _, raw := range l { + if raw.(string) == strings.ToUpper(raw.(string)) { + req = append(req, raw.(string)) + } + } + return req +} + +func expandBucketLogging(configured interface{}) *storage.BucketLogging { + loggings := configured.([]interface{}) + if len(loggings) == 0 || loggings[0] == nil { + return nil + } + + logging := loggings[0].(map[string]interface{}) + + bucketLogging := &storage.BucketLogging{ + LogBucket: logging["log_bucket"].(string), + LogObjectPrefix: logging["log_object_prefix"].(string), + } + + return bucketLogging +} + +func flattenBucketLogging(bucketLogging *storage.BucketLogging) []map[string]interface{} { + loggings := make([]map[string]interface{}, 0, 1) + + if bucketLogging == nil { + return loggings + } + + logging := map[string]interface{}{ + "log_bucket": bucketLogging.LogBucket, + "log_object_prefix": bucketLogging.LogObjectPrefix, + } + + loggings = append(loggings, logging) + return loggings +} + +func expandBucketRetentionPolicy(configured interface{}) *storage.BucketRetentionPolicy { + retentionPolicies := configured.([]interface{}) + if len(retentionPolicies) == 0 { + return nil + } + retentionPolicy := retentionPolicies[0].(map[string]interface{}) + + bucketRetentionPolicy := &storage.BucketRetentionPolicy{ + IsLocked: retentionPolicy["is_locked"].(bool), + RetentionPeriod: int64(retentionPolicy["retention_period"].(int)), + } + + return bucketRetentionPolicy +} + +func flattenBucketRetentionPolicy(bucketRetentionPolicy *storage.BucketRetentionPolicy) []map[string]interface{} { + bucketRetentionPolicies := make([]map[string]interface{}, 0, 1) + + if bucketRetentionPolicy == nil { + return bucketRetentionPolicies + } + + retentionPolicy := map[string]interface{}{ + "is_locked": bucketRetentionPolicy.IsLocked, + "retention_period": bucketRetentionPolicy.RetentionPeriod, + } + + bucketRetentionPolicies = append(bucketRetentionPolicies, retentionPolicy) + return bucketRetentionPolicies +} + 
+func flattenBucketObjectRetention(bucketObjectRetention *storage.BucketObjectRetention) bool { + if bucketObjectRetention == nil { + return false + } + if bucketObjectRetention.Mode == "Enabled" { + return true + } + return false +} + +func expandBucketSoftDeletePolicy(configured interface{}) *storage.BucketSoftDeletePolicy{ + configuredSoftDeletePolicies := configured.([]interface{}) + if len(configuredSoftDeletePolicies) == 0 { + return nil + } + configuredSoftDeletePolicy := configuredSoftDeletePolicies[0].(map[string]interface{}) + softDeletePolicy := &storage.BucketSoftDeletePolicy{ + RetentionDurationSeconds: int64(configuredSoftDeletePolicy["retention_duration_seconds"].(int)), + } + softDeletePolicy.ForceSendFields=append(softDeletePolicy.ForceSendFields,"RetentionDurationSeconds") + return softDeletePolicy +} + +func flattenBucketSoftDeletePolicy(softDeletePolicy *storage.BucketSoftDeletePolicy) []map[string]interface{} { + policies := make([]map[string]interface{}, 0, 1) + if softDeletePolicy == nil { + return policies + } + policy := map[string]interface{}{ + "retention_duration_seconds": softDeletePolicy.RetentionDurationSeconds, + "effective_time": softDeletePolicy.EffectiveTime, + } + policies = append(policies, policy) + return policies +} + +func expandBucketVersioning(configured interface{}) *storage.BucketVersioning { + versionings := configured.([]interface{}) + if len(versionings) == 0 { + return nil + } + + versioning := versionings[0].(map[string]interface{}) + + bucketVersioning := &storage.BucketVersioning{} + + bucketVersioning.Enabled = versioning["enabled"].(bool) + bucketVersioning.ForceSendFields = append(bucketVersioning.ForceSendFields, "Enabled") + + return bucketVersioning +} + +func expandBucketAutoclass(configured interface{}) *storage.BucketAutoclass { + autoclassList := configured.([]interface{}) + if len(autoclassList) == 0 { + return nil + } + + autoclass := autoclassList[0].(map[string]interface{}) + + bucketAutoclass := 
&storage.BucketAutoclass{} + + bucketAutoclass.Enabled = autoclass["enabled"].(bool) + if autoclass["terminal_storage_class"] != "" { + bucketAutoclass.TerminalStorageClass = autoclass["terminal_storage_class"].(string) + } + bucketAutoclass.ForceSendFields = append(bucketAutoclass.ForceSendFields, "Enabled") + + return bucketAutoclass +} + +func flattenBucketVersioning(bucketVersioning *storage.BucketVersioning) []map[string]interface{} { + versionings := make([]map[string]interface{}, 0, 1) + + if bucketVersioning == nil { + return versionings + } + + versioning := map[string]interface{}{ + "enabled": bucketVersioning.Enabled, + } + versionings = append(versionings, versioning) + return versionings +} + +func flattenBucketAutoclass(bucketAutoclass *storage.BucketAutoclass) []map[string]interface{} { + autoclassList := make([]map[string]interface{}, 0, 1) + + if bucketAutoclass == nil { + return autoclassList + } + + autoclass := map[string]interface{}{ + "enabled": bucketAutoclass.Enabled, + "terminal_storage_class": bucketAutoclass.TerminalStorageClass, + } + autoclassList = append(autoclassList, autoclass) + return autoclassList +} + +func flattenBucketLifecycle(d *schema.ResourceData, lifecycle *storage.BucketLifecycle) []map[string]interface{} { + if lifecycle == nil || lifecycle.Rule == nil { + return []map[string]interface{}{} + } + + rules := make([]map[string]interface{}, 0, len(lifecycle.Rule)) + + for index, rule := range lifecycle.Rule { + rules = append(rules, map[string]interface{}{ + "action": schema.NewSet(resourceGCSBucketLifecycleRuleActionHash, []interface{}{flattenBucketLifecycleRuleAction(rule.Action)}), + "condition": schema.NewSet(resourceGCSBucketLifecycleRuleConditionHash, []interface{}{flattenBucketLifecycleRuleCondition(index, d, rule.Condition)}), + }) + } + + return rules +} + +func flattenBucketLifecycleRuleAction(action *storage.BucketLifecycleRuleAction) map[string]interface{} { + return map[string]interface{}{ + "type": 
action.Type, + "storage_class": action.StorageClass, + } +} + +func flattenBucketLifecycleRuleCondition(index int, d *schema.ResourceData, condition *storage.BucketLifecycleRuleCondition) map[string]interface{} { + ruleCondition := map[string]interface{}{ + "created_before": condition.CreatedBefore, + "matches_storage_class": tpgresource.ConvertStringArrToInterface(condition.MatchesStorageClass), + "num_newer_versions": int(condition.NumNewerVersions), + "custom_time_before": condition.CustomTimeBefore, + "days_since_custom_time": int(condition.DaysSinceCustomTime), + "days_since_noncurrent_time": int(condition.DaysSinceNoncurrentTime), + "noncurrent_time_before": condition.NoncurrentTimeBefore, + "matches_prefix": tpgresource.ConvertStringArrToInterface(condition.MatchesPrefix), + "matches_suffix": tpgresource.ConvertStringArrToInterface(condition.MatchesSuffix), + } + if condition.Age != nil { + ruleCondition["age"] = int(*condition.Age) + } + if condition.IsLive == nil { + ruleCondition["with_state"] = "ANY" + } else { + if *condition.IsLive { + ruleCondition["with_state"] = "LIVE" + } else { + ruleCondition["with_state"] = "ARCHIVED" + } + } + // setting no_age value from state config since it is terraform only variable and not getting value from backend. 
+ if v, ok := d.GetOk(fmt.Sprintf("lifecycle_rule.%d.condition",index)); ok{ + state_condition := v.(*schema.Set).List()[0].(map[string]interface{}) + ruleCondition["no_age"] = state_condition["no_age"].(bool) + ruleCondition["send_days_since_noncurrent_time_if_zero"] = state_condition["send_days_since_noncurrent_time_if_zero"].(bool) + ruleCondition["send_days_since_custom_time_if_zero"] = state_condition["send_days_since_custom_time_if_zero"].(bool) + ruleCondition["send_num_newer_versions_if_zero"] = state_condition["send_num_newer_versions_if_zero"].(bool) + } + + return ruleCondition +} + +func flattenBucketWebsite(website *storage.BucketWebsite) []map[string]interface{} { + if website == nil { + return nil + } + websites := make([]map[string]interface{}, 0, 1) + websites = append(websites, map[string]interface{}{ + "main_page_suffix": website.MainPageSuffix, + "not_found_page": website.NotFoundPage, + }) + + return websites +} + +func expandBucketWebsite(v interface{}) *storage.BucketWebsite { + if v == nil { + return nil + } + vs := v.([]interface{}) + + if len(vs) < 1 || vs[0] == nil { + return nil + } + + website := vs[0].(map[string]interface{}) + w := &storage.BucketWebsite{} + + if v := website["not_found_page"]; v != "" { + w.NotFoundPage = v.(string) + } + + if v := website["main_page_suffix"]; v != "" { + w.MainPageSuffix = v.(string) + } + return w +} + +func expandIamConfiguration(d *schema.ResourceData) *storage.BucketIamConfiguration { + cfg := &storage.BucketIamConfiguration{ + ForceSendFields: []string{"UniformBucketLevelAccess"}, + UniformBucketLevelAccess: &storage.BucketIamConfigurationUniformBucketLevelAccess{ + Enabled: d.Get("uniform_bucket_level_access").(bool), + ForceSendFields: []string{"Enabled"}, + }, + } + + if v, ok := d.GetOk("public_access_prevention"); ok { + cfg.PublicAccessPrevention = v.(string) + } + + return cfg +} + +func expandStorageBucketLifecycle(v interface{}) (*storage.BucketLifecycle, error) { + if v == nil { + 
return &storage.BucketLifecycle{ + ForceSendFields: []string{"Rule"}, + }, nil + } + lifecycleRules := v.([]interface{}) + transformedRules := make([]*storage.BucketLifecycleRule, 0, len(lifecycleRules)) + + for _, v := range lifecycleRules { + rule, err := expandStorageBucketLifecycleRule(v) + if err != nil { + return nil, err + } + transformedRules = append(transformedRules, rule) + } + + if len(transformedRules) == 0 { + return &storage.BucketLifecycle{ + ForceSendFields: []string{"Rule"}, + }, nil + } + + return &storage.BucketLifecycle{ + Rule: transformedRules, + }, nil +} + +func expandStorageBucketLifecycleRule(v interface{}) (*storage.BucketLifecycleRule, error) { + if v == nil { + return nil, nil + } + + rule := v.(map[string]interface{}) + transformed := &storage.BucketLifecycleRule{} + + if v, ok := rule["action"]; ok { + action, err := expandStorageBucketLifecycleRuleAction(v) + if err != nil { + return nil, err + } + transformed.Action = action + } else { + return nil, fmt.Errorf("exactly one action is required for lifecycle_rule") + } + + if v, ok := rule["condition"]; ok { + cond, err := expandStorageBucketLifecycleRuleCondition(v) + if err != nil { + return nil, err + } + transformed.Condition = cond + } + + return transformed, nil +} + +func expandStorageBucketLifecycleRuleAction(v interface{}) (*storage.BucketLifecycleRuleAction, error) { + if v == nil { + return nil, fmt.Errorf("exactly one action is required for lifecycle_rule") + } + + actions := v.(*schema.Set).List() + if len(actions) != 1 { + return nil, fmt.Errorf("exactly one action is required for lifecycle_rule") + } + + action := actions[0].(map[string]interface{}) + transformed := &storage.BucketLifecycleRuleAction{} + + if v, ok := action["type"]; ok { + transformed.Type = v.(string) + } + + if v, ok := action["storage_class"]; ok { + transformed.StorageClass = v.(string) + } + + return transformed, nil +} + +func expandStorageBucketLifecycleRuleCondition(v interface{}) 
(*storage.BucketLifecycleRuleCondition, error) { + if v == nil { + return nil, nil + } + conditions := v.(*schema.Set).List() + if len(conditions) != 1 { + return nil, fmt.Errorf("One and only one condition can be provided per lifecycle_rule") + } + + condition := conditions[0].(map[string]interface{}) + transformed := &storage.BucketLifecycleRuleCondition{} + // Setting high precedence of no_age over age when both used together. + // Only sets age value when no_age is not present or no_age is present and has false value + if v, ok := condition["no_age"]; !ok || !(v.(bool)) { + if v, ok := condition["age"]; ok { + age := int64(v.(int)) + transformed.Age = &age + transformed.ForceSendFields = append(transformed.ForceSendFields, "Age") + } + } + + if v, ok := condition["created_before"]; ok { + transformed.CreatedBefore = v.(string) + } + + withStateV, withStateOk := condition["with_state"] + // Because TF schema, withStateOk currently will always be true, + // do the check just in case. + if withStateOk { + switch withStateV.(string) { + case "LIVE": + transformed.IsLive = googleapi.Bool(true) + case "ARCHIVED": + transformed.IsLive = googleapi.Bool(false) + case "ANY", "": + // This is unnecessary, but set explicitly to nil for readability. 
+ transformed.IsLive = nil + default: + return nil, fmt.Errorf("unexpected value %q for condition.with_state", withStateV.(string)) + } + } + + if v, ok := condition["matches_storage_class"]; ok { + classes := v.([]interface{}) + transformedClasses := make([]string, 0, len(classes)) + + for _, v := range classes { + transformedClasses = append(transformedClasses, v.(string)) + } + transformed.MatchesStorageClass = transformedClasses + } + + if v, ok := condition["num_newer_versions"]; ok { + transformed.NumNewerVersions = int64(v.(int)) + if u, ok := condition["send_num_newer_versions_if_zero"]; ok && u.(bool) { + transformed.ForceSendFields = append(transformed.ForceSendFields, "NumNewerVersions") + } + } + + if v, ok := condition["custom_time_before"]; ok { + transformed.CustomTimeBefore = v.(string) + } + + if v, ok := condition["days_since_custom_time"]; ok { + transformed.DaysSinceCustomTime = int64(v.(int)) + if u, ok := condition["send_days_since_custom_time_if_zero"]; ok && u.(bool) { + transformed.ForceSendFields = append(transformed.ForceSendFields, "DaysSinceCustomTime") + } + } + + if v, ok := condition["days_since_noncurrent_time"]; ok { + transformed.DaysSinceNoncurrentTime = int64(v.(int)) + if u, ok := condition["send_days_since_noncurrent_time_if_zero"]; ok && u.(bool) { + transformed.ForceSendFields = append(transformed.ForceSendFields, "DaysSinceNoncurrentTime") + } + } + + if v, ok := condition["noncurrent_time_before"]; ok { + transformed.NoncurrentTimeBefore = v.(string) + } + + if v, ok := condition["matches_prefix"]; ok { + prefixes := v.([]interface{}) + transformedPrefixes := make([]string, 0, len(prefixes)) + + for _, v := range prefixes { + transformedPrefixes = append(transformedPrefixes, v.(string)) + } + transformed.MatchesPrefix = transformedPrefixes + } + if v, ok := condition["matches_suffix"]; ok { + suffixes := v.([]interface{}) + transformedSuffixes := make([]string, 0, len(suffixes)) + + for _, v := range suffixes { + 
transformedSuffixes = append(transformedSuffixes, v.(string)) + } + transformed.MatchesSuffix = transformedSuffixes + } + + return transformed, nil +} + +func resourceGCSBucketLifecycleRuleActionHash(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + + buf.WriteString(fmt.Sprintf("%s-", m["type"].(string))) + + if v, ok := m["storage_class"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + return tpgresource.Hashcode(buf.String()) +} + +func resourceGCSBucketLifecycleRuleConditionHash(v interface{}) int { + if v == nil { + return 0 + } + + var buf bytes.Buffer + m := v.(map[string]interface{}) + + if v, ok := m["no_age"]; ok && v.(bool){ + buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } else { + if v, ok := m["age"]; ok { + buf.WriteString(fmt.Sprintf("%d-", v.(int))) + } + } + + if v, ok := m["days_since_custom_time"]; ok { + buf.WriteString(fmt.Sprintf("%d-", v.(int))) + } + + if v, ok := m["days_since_noncurrent_time"]; ok { + buf.WriteString(fmt.Sprintf("%d-", v.(int))) + } + + if v, ok := m["created_before"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := m["custom_time_before"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + if v, ok := m["noncurrent_time_before"]; ok { + buf.WriteString(fmt.Sprintf("%s-", v.(string))) + } + + withStateV, withStateOk := m["with_state"] + if withStateOk { + switch withStateV.(string) { + case "LIVE": + buf.WriteString(fmt.Sprintf("%t-", true)) + case "ARCHIVED": + buf.WriteString(fmt.Sprintf("%t-", false)) + } + } + + if v, ok := m["matches_storage_class"]; ok { + matches_storage_classes := v.([]interface{}) + for _, matches_storage_class := range matches_storage_classes { + buf.WriteString(fmt.Sprintf("%s-", matches_storage_class)) + } + } + + if v, ok := m["num_newer_versions"]; ok { + buf.WriteString(fmt.Sprintf("%d-", v.(int))) + } + + if v, ok := m["send_days_since_noncurrent_time_if_zero"]; ok { + 
buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } + + if v, ok := m["send_days_since_custom_time_if_zero"]; ok { + buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } + + if v, ok := m["send_num_newer_versions_if_zero"]; ok { + buf.WriteString(fmt.Sprintf("%t-", v.(bool))) + } + + if v, ok := m["matches_prefix"]; ok { + matches_prefixes := v.([]interface{}) + for _, matches_prefix := range matches_prefixes { + buf.WriteString(fmt.Sprintf("%s-", matches_prefix)) + } + } + if v, ok := m["matches_suffix"]; ok { + matches_suffixes := v.([]interface{}) + for _, matches_suffix := range matches_suffixes { + buf.WriteString(fmt.Sprintf("%s-", matches_suffix)) + } + } + + return tpgresource.Hashcode(buf.String()) +} + +func lockRetentionPolicy(bucketsService *storage.BucketsService, bucketName string, metageneration int64) error { + lockPolicyCall := bucketsService.LockRetentionPolicy(bucketName, metageneration) + if _, err := lockPolicyCall.Do(); err != nil { + return err + } + + return nil +} + +// d.HasChange("lifecycle_rule") always returns true, giving false positives. This function detects changes +// to the list size or the actions/conditions of rules directly. 
+func detectLifecycleChange(d *schema.ResourceData) bool { + if d.HasChange("lifecycle_rule.#") { + return true + } + + if l, ok := d.GetOk("lifecycle_rule"); ok { + lifecycleRules := l.([]interface{}) + for i := range lifecycleRules { + if d.HasChange(fmt.Sprintf("lifecycle_rule.%d.action", i)) || d.HasChange(fmt.Sprintf("lifecycle_rule.%d.condition", i)) { + return true + } + } + } + + return false +} + +// Resource Read and DataSource Read both need to set attributes, but Data Sources don't support Timeouts +// so we pulled this portion out separately (https://github.com/hashicorp/terraform-provider-google/issues/11264) +func setStorageBucket(d *schema.ResourceData, config *transport_tpg.Config, res *storage.Bucket, bucket, userAgent string) error { + // We are trying to support several different use cases for bucket. Buckets are globally + // unique but they are associated with projects internally, but some users want to use + // buckets in a project agnostic way. Thus we will check to see if the project ID has been + // explicitly set and use that first. However if no project is explicitly set, such as during + // import, we will look up the ID from the compute API using the project Number from the + // bucket API response. 
+ // If you are working in a project-agnostic way and have not set the project ID in the provider + // block, or the resource or an environment variable, we use the compute API to lookup the projectID + // from the projectNumber which is included in the bucket API response + if d.Get("project") == "" { + project, _ := tpgresource.GetProject(d, config) + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + } + if d.Get("project") == "" { + proj, err := config.NewComputeClient(userAgent).Projects.Get(strconv.FormatUint(res.ProjectNumber, 10)).Do() + if err != nil { + return err + } + log.Printf("[DEBUG] Bucket %v is in project number %v, which is project ID %s.\n", res.Name, res.ProjectNumber, proj.Name) + if err := d.Set("project", proj.Name); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + } + + // Update the bucket ID according to the resource ID + if err := d.Set("self_link", res.SelfLink); err != nil { + return fmt.Errorf("Error setting self_link: %s", err) + } + if err := d.Set("url", fmt.Sprintf("gs://%s", bucket)); err != nil { + return fmt.Errorf("Error setting url: %s", err) + } + if err := d.Set("project_number", res.ProjectNumber); err != nil { + return fmt.Errorf("Error setting project_number: %s", err) + } + if err := d.Set("storage_class", res.StorageClass); err != nil { + return fmt.Errorf("Error setting storage_class: %s", err) + } + if err := d.Set("encryption", flattenBucketEncryption(res.Encryption)); err != nil { + return fmt.Errorf("Error setting encryption: %s", err) + } + if err := d.Set("location", res.Location); err != nil { + return fmt.Errorf("Error setting location: %s", err) + } + if err := d.Set("cors", flattenCors(res.Cors)); err != nil { + return fmt.Errorf("Error setting cors: %s", err) + } + if err := d.Set("default_event_based_hold", res.DefaultEventBasedHold); err != nil { + return fmt.Errorf("Error setting default_event_based_hold: %s", err) + } + 
if err := d.Set("logging", flattenBucketLogging(res.Logging)); err != nil { + return fmt.Errorf("Error setting logging: %s", err) + } + if err := d.Set("enable_object_retention", flattenBucketObjectRetention(res.ObjectRetention)); err != nil { + return fmt.Errorf("Error setting object retention: %s", err) + } + if err := d.Set("versioning", flattenBucketVersioning(res.Versioning)); err != nil { + return fmt.Errorf("Error setting versioning: %s", err) + } + if err := d.Set("autoclass", flattenBucketAutoclass(res.Autoclass)); err != nil { + return fmt.Errorf("Error setting autoclass: %s", err) + } + // lifecycle_rule contains terraform only variable no_age. + // Passing config("d") to flattener function to set no_age separately. + if err := d.Set("lifecycle_rule", flattenBucketLifecycle(d, res.Lifecycle)); err != nil { + return fmt.Errorf("Error setting lifecycle_rule: %s", err) + } + if err := tpgresource.SetLabels(res.Labels, d, "labels"); err != nil { + return fmt.Errorf("Error setting labels: %s", err) + } + if err := tpgresource.SetLabels(res.Labels, d, "terraform_labels"); err != nil { + return fmt.Errorf("Error setting terraform_labels: %s", err) + } + if err := d.Set("effective_labels", res.Labels); err != nil { + return fmt.Errorf("Error setting labels: %s", err) + } + if err := d.Set("website", flattenBucketWebsite(res.Website)); err != nil { + return fmt.Errorf("Error setting website: %s", err) + } + if err := d.Set("retention_policy", flattenBucketRetentionPolicy(res.RetentionPolicy)); err != nil { + return fmt.Errorf("Error setting retention_policy: %s", err) + } + if err := d.Set("custom_placement_config", flattenBucketCustomPlacementConfig(res.CustomPlacementConfig)); err != nil { + return fmt.Errorf("Error setting custom_placement_config: %s", err) + } + // Needs to hide rpo field for single-region buckets. + // Check the Rpo field from API response to determine whether bucket is in single region config or not. 
+ if res.Rpo != "" { + if err := d.Set("rpo", res.Rpo); err != nil { + return fmt.Errorf("Error setting RPO setting : %s", err) + } + } + if err := d.Set("soft_delete_policy", flattenBucketSoftDeletePolicy(res.SoftDeletePolicy)); err != nil { + return fmt.Errorf("Error setting soft_delete_policy: %s", err) + } + if res.IamConfiguration != nil && res.IamConfiguration.UniformBucketLevelAccess != nil { + if err := d.Set("uniform_bucket_level_access", res.IamConfiguration.UniformBucketLevelAccess.Enabled); err != nil { + return fmt.Errorf("Error setting uniform_bucket_level_access: %s", err) + } + } else { + if err := d.Set("uniform_bucket_level_access", false); err != nil { + return fmt.Errorf("Error setting uniform_bucket_level_access: %s", err) + } + } + + if res.IamConfiguration != nil && res.IamConfiguration.PublicAccessPrevention != "" { + if err := d.Set("public_access_prevention", res.IamConfiguration.PublicAccessPrevention); err != nil { + return fmt.Errorf("Error setting public_access_prevention: %s", err) + } + } + + if res.Billing == nil { + if err := d.Set("requester_pays", nil); err != nil { + return fmt.Errorf("Error setting requester_pays: %s", err) + } + } else { + if err := d.Set("requester_pays", res.Billing.RequesterPays); err != nil { + return fmt.Errorf("Error setting requester_pays: %s", err) + } + } + + d.SetId(res.Id) + return nil +} diff --git a/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket_test.go b/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket_test.go new file mode 100644 index 000000000000..caf628772936 --- /dev/null +++ b/mmv1/third_party/terraform/services/storage/go/resource_storage_bucket_test.go @@ -0,0 +1,2636 @@ +package storage_test + +import ( + "bytes" + "fmt" + "log" + "regexp" + "testing" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + + 
"github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" + + "google.golang.org/api/googleapi" + "google.golang.org/api/storage/v1" +) + +func TestAccStorageBucket_basic(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "false"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "project", envvar.GetTestProjectFromEnv()), + resource.TestCheckResourceAttrSet( + "google_storage_bucket.bucket", "project_number"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportStateId: fmt.Sprintf("%s/%s", envvar.GetTestProjectFromEnv(), bucketName), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_basicWithAutoclass(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var updated storage.Bucket + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basicWithAutoclass(bucketName, true), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, 
&bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basicWithAutoclass_update(bucketName, true), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + testAccCheckStorageBucketWasUpdated(&updated, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basicWithAutoclass(bucketName, false), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_AutoclassDiffSupress(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basicWithAutoclass(bucketName,false), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + 
}, + { + Config: testAccStorageBucket_basicWithAutoclass(bucketName,true), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_requesterPays(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-requester-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_requesterPays(bucketName, true), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "requester_pays", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_lowercaseLocation(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lowercaseLocation(bucketName), + }, + { + ResourceName: 
"google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_dualLocation(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_dualLocation(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_dualLocation_lowercase(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_dualLocation_lowercase(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_dualLocation_versionChange(t *testing.T) { + // Test is not parallel because ENVs are set. 
+ // Need to skip VCR as this test downloads providers from the Terraform Registry + acctest.SkipIfVcr(t) + + creds := envvar.GetTestCredsFromEnv() + project := envvar.GetTestProjectFromEnv() + t.Setenv("GOOGLE_CREDENTIALS", creds) + t.Setenv("GOOGLE_PROJECT", project) + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_dualLocation(bucketName), + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "5.30.0", + Source: "hashicorp/google", + }, + }, + }, + { + ResourceName: "google_storage_bucket.bucket", + ExternalProviders: map[string]resource.ExternalProvider{ + "google": { + VersionConstraint: "5.30.0", + Source: "hashicorp/google", + }, + }, + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_dualLocation(bucketName), + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + }, + { + ResourceName: "google_storage_bucket.bucket", + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_dualLocation_rpo(t *testing.T) { + t.Parallel() + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_dualLocation(bucketName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "rpo", "DEFAULT"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_dualLocation_rpo(bucketName,"ASYNC_TURBO"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "rpo", "ASYNC_TURBO"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_dualLocation_rpo(bucketName,"DEFAULT"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "rpo", "DEFAULT"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_multiLocation_rpo(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "rpo", "DEFAULT"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_multiLocation_rpo(bucketName,"DEFAULT"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "rpo", "DEFAULT"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func 
TestAccStorageBucket_customAttributes(t *testing.T) { + t.Parallel() + + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_lifecycleRulesMultiple(t *testing.T) { + // multiple fine-grained resources + acctest.SkipIfVcr(t) + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lifecycleRulesMultiple(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_lifecycleRulesMultiple_update(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_lifecycleRuleStateLive(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lifecycleRule_withStateLive(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(googleapi.Bool(true), &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_lifecycleRuleStateArchived(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lifecycleRule_emptyArchived(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(nil, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(googleapi.Bool(false), &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func 
TestAccStorageBucket_lifecycleRuleStateAny(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(googleapi.Bool(false), &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_lifecycleRule_withStateLive(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(googleapi.Bool(true), &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_lifecycleRule_withStateAny(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionState(nil, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_lifecycleRule_withStateArchived(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, 
&bucket), + testAccCheckStorageBucketLifecycleConditionState(googleapi.Bool(false), &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_lifecycleRulesVirtualFields(t *testing.T) { + t.Parallel() + var bucket storage.Bucket + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_customAttributes_withLifecycle1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes_withLifecycleVirtualFieldsUpdate1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionNoAge(nil, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.1.condition.0.no_age","lifecycle_rule.1.condition.0.send_days_since_noncurrent_time_if_zero","lifecycle_rule.2.condition.0.send_days_since_noncurrent_time_if_zero","lifecycle_rule.1.condition.0.send_days_since_custom_time_if_zero","lifecycle_rule.2.condition.0.send_days_since_custom_time_if_zero","lifecycle_rule.1.condition.0.send_num_newer_versions_if_zero","lifecycle_rule.2.condition.0.send_num_newer_versions_if_zero"}, + }, + { + Config: 
testAccStorageBucket_customAttributes_withLifecycleVirtualFieldsUpdate2(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketLifecycleConditionNoAge(nil, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy","lifecycle_rule.1.condition.0.no_age","lifecycle_rule.0.condition.0.send_days_since_noncurrent_time_if_zero","lifecycle_rule.0.condition.0.send_days_since_custom_time_if_zero","lifecycle_rule.0.condition.0.send_num_newer_versions_if_zero"}, + }, + { + Config: testAccStorageBucket_customAttributes_withLifecycle1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_storageClass(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var updated storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_storageClass(bucketName, "MULTI_REGIONAL", "US"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_storageClass(bucketName, "NEARLINE", 
"US"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + // storage_class-only change should not recreate + testAccCheckStorageBucketWasUpdated(&updated, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_storageClass(bucketName, "REGIONAL", "US-CENTRAL1"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + // Location change causes recreate + testAccCheckStorageBucketWasRecreated(&updated, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_update_requesterPays(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var updated storage.Bucket + bucketName := fmt.Sprintf("tf-test-requester-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_requesterPays(bucketName, true), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_requesterPays(bucketName, false), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + 
testAccCheckStorageBucketWasUpdated(&updated, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_update(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var recreated storage.Bucket + var updated storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "false"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &recreated), + testAccCheckStorageBucketWasRecreated(&recreated, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes_withLifecycle1(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + testAccCheckStorageBucketWasUpdated(&updated, &recreated), + 
resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes_withLifecycle2(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + testAccCheckStorageBucketWasUpdated(&updated, &recreated), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes_withLifecycle1Update(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + testAccCheckStorageBucketWasUpdated(&updated, &recreated), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &updated), + testAccCheckStorageBucketWasUpdated(&updated, &recreated), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "force_destroy", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_forceDestroy(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName 
:= fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + Config: testAccStorageBucket_customAttributes(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(t, bucketName), + ), + }, + { + Config: testAccStorageBucket_customAttributes(fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t))), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketMissing(t, bucketName), + ), + }, + }, + }) +} + +func TestAccStorageBucket_forceDestroyWithVersioning(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(t, bucketName), + ), + }, + { + Config: testAccStorageBucket_forceDestroyWithVersioning(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(t, bucketName), + ), + }, + }, + }) +} + +func TestAccStorageBucket_forceDestroyObjectDeleteError(t *testing.T) { + 
t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_forceDestroyWithRetentionPolicy(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketPutItem(t, bucketName), + ), + }, + { + Config: testAccStorageBucket_forceDestroyWithRetentionPolicy(bucketName), + Destroy: true, + ExpectError: regexp.MustCompile("could not delete non-empty bucket due to error when deleting contents"), + }, + { + Config: testAccStorageBucket_forceDestroy(bucketName), + }, + }, + }) +} + +func TestAccStorageBucket_enable_object_retention(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := acctest.TestBucketName(t) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_enable_object_retention(bucketName, "true"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_enable_object_retention(bucketName, "false"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func 
TestAccStorageBucket_versioning(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_versioning(bucketName, "true"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.0.enabled", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_versioning_empty(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.0.enabled", "true"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_versioning(bucketName, "false"), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.0.enabled", "false"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + 
ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_versioning_empty(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "versioning.0.enabled", "false"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_logging(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_logging(bucketName, "log-bucket"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_bucket", "log-bucket"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_object_prefix", bucketName), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_loggingWithPrefix(bucketName, "another-log-bucket", "object-prefix"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "1"), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_bucket", "another-log-bucket"), + 
resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.0.log_object_prefix", "object-prefix"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "logging.#", "0"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_cors(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testGoogleStorageBucketsCors(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_basic(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_defaultEventBasedHold(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_defaultEventBasedHold(bucketName), + }, + { + ResourceName: 
"google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_encryption(t *testing.T) { + // when rotation is set, next rotation time is set using time.Now + acctest.SkipIfVcr(t) + t.Parallel() + + context := map[string]interface{}{ + "organization": envvar.GetTestOrgFromEnv(t), + "billing_account": envvar.GetTestBillingAccountFromEnv(t), + "random_suffix": acctest.RandString(t, 10), + "random_int": acctest.RandInt(t), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_encryption(context), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_publicAccessPrevention(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_publicAccessPrevention(bucketName, "enforced"), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_uniformBucketAccessOnly(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_uniformBucketAccessOnly(bucketName, true), 
+ }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_uniformBucketAccessOnly(bucketName, false), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_labels(t *testing.T) { + t.Parallel() + + bucketName := fmt.Sprintf("tf-test-acl-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + // Going from two labels + { + Config: testAccStorageBucket_updateLabels(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy", "labels", "terraform_labels"}, + }, + // Down to only one label (test single label deletion) + { + Config: testAccStorageBucket_labels(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy", "labels", "terraform_labels"}, + }, + // And make sure deleting all labels work + { + Config: testAccStorageBucket_basic(bucketName), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccStorageBucket_retentionPolicy(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_retentionPolicy(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketRetentionPolicy(t, bucketName), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_website(t *testing.T) { + t.Parallel() + + bucketSuffix := fmt.Sprintf("tf-website-test-%d", acctest.RandInt(t)) + errRe := regexp.MustCompile("one of\n`website.0.main_page_suffix,website.0.not_found_page` must be specified") + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_websiteNoAttributes(bucketSuffix), + ExpectError: errRe, + }, + { + Config: testAccStorageBucket_websiteOneAttribute(bucketSuffix), + }, + { + ResourceName: "google_storage_bucket.website", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_websiteOneAttributeUpdate(bucketSuffix), + }, + { + ResourceName: "google_storage_bucket.website", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_website(bucketSuffix), + }, + { + ResourceName: "google_storage_bucket.website", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_websiteRemoved(bucketSuffix), + }, + { + ResourceName: "google_storage_bucket.website", + ImportState: 
true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func TestAccStorageBucket_retentionPolicyLocked(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + var newBucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_lockedRetentionPolicy(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + testAccCheckStorageBucketRetentionPolicy(t, bucketName), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_retentionPolicy(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &newBucket), + testAccCheckStorageBucketWasRecreated(&newBucket, &bucket), + ), + }, + }, + }) +} + +func TestAccStorageBucket_SoftDeletePolicy(t *testing.T) { + t.Parallel() + + var bucket storage.Bucket + bucketName := fmt.Sprintf("tf-test-acc-bucket-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccStorageBucketDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccStorageBucket_basic(bucketName), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", 
"soft_delete_policy.0.retention_duration_seconds", "604800"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_SoftDeletePolicy(bucketName,7776000), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "soft_delete_policy.0.retention_duration_seconds", "7776000"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + { + Config: testAccStorageBucket_SoftDeletePolicy(bucketName,0), + Check: resource.ComposeTestCheckFunc( + testAccCheckStorageBucketExists( + t, "google_storage_bucket.bucket", bucketName, &bucket), + resource.TestCheckResourceAttr( + "google_storage_bucket.bucket", "soft_delete_policy.0.retention_duration_seconds", "0"), + ), + }, + { + ResourceName: "google_storage_bucket.bucket", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"force_destroy"}, + }, + }, + }) +} + +func testAccCheckStorageBucketExists(t *testing.T, n string, bucketName string, bucket *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("Not found: %s", n) + } + + if rs.Primary.ID == "" { + return fmt.Errorf("No Project_ID is set") + } + + config := acctest.GoogleProviderConfig(t) + + found, err := config.NewStorageClient(config.UserAgent).Buckets.Get(rs.Primary.ID).Do() + if err != nil { + return err + } + + if found.Id != rs.Primary.ID { + return fmt.Errorf("Bucket not found") + } + + if found.Name != bucketName { + return fmt.Errorf("expected name %s, got %s", bucketName, found.Name) + } + + *bucket = *found + return nil + } +} + 
+func testAccCheckStorageBucketWasUpdated(newBucket *storage.Bucket, b *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + if newBucket.TimeCreated != b.TimeCreated { + return fmt.Errorf("expected storage bucket to have been updated (had same creation time), instead was recreated - old creation time %s, new creation time %s", newBucket.TimeCreated, b.TimeCreated) + } + return nil + } +} + +func testAccCheckStorageBucketWasRecreated(newBucket *storage.Bucket, b *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + if newBucket.TimeCreated == b.TimeCreated { + return fmt.Errorf("expected storage bucket to have been recreated, instead had same creation time (%s)", b.TimeCreated) + } + return nil + } +} + +func testAccCheckStorageBucketPutItem(t *testing.T, bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + data := bytes.NewBufferString("test") + dataReader := bytes.NewReader(data.Bytes()) + object := &storage.Object{Name: "bucketDestroyTestFile"} + + // This needs to use Media(io.Reader) call, otherwise it does not go to /upload API and fails + if res, err := config.NewStorageClient(config.UserAgent).Objects.Insert(bucketName, object).Media(dataReader).Do(); err == nil { + log.Printf("[INFO] Created object %v at location %v\n\n", res.Name, res.SelfLink) + } else { + return fmt.Errorf("Objects.Insert failed: %v", err) + } + + return nil + } +} + +func testAccCheckStorageBucketRetentionPolicy(t *testing.T, bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + data := bytes.NewBufferString("test") + dataReader := bytes.NewReader(data.Bytes()) + object := &storage.Object{Name: "bucketDestroyTestFile"} + + // This needs to use Media(io.Reader) call, otherwise it does not go to /upload API and fails + if res, err := 
config.NewStorageClient(config.UserAgent).Objects.Insert(bucketName, object).Media(dataReader).Do(); err == nil { + log.Printf("[INFO] Created object %v at location %v\n\n", res.Name, res.SelfLink) + } else { + return fmt.Errorf("Objects.Insert failed: %v", err) + } + + // Test deleting immediately, this should fail because of the 10 second retention + if err := config.NewStorageClient(config.UserAgent).Objects.Delete(bucketName, objectName).Do(); err == nil { + return fmt.Errorf("Objects.Delete succeeded: %v", object.Name) + } + + // Wait 10 seconds and delete again + time.Sleep(10000 * time.Millisecond) + + if err := config.NewStorageClient(config.UserAgent).Objects.Delete(bucketName, object.Name).Do(); err == nil { + log.Printf("[INFO] Deleted object %v at location %v\n\n", object.Name, object.SelfLink) + } else { + return fmt.Errorf("Objects.Delete failed: %v", err) + } + + return nil + } +} + +func testAccCheckStorageBucketMissing(t *testing.T, bucketName string) resource.TestCheckFunc { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + _, err := config.NewStorageClient(config.UserAgent).Buckets.Get(bucketName).Do() + if err == nil { + return fmt.Errorf("Found %s", bucketName) + } + + if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 404 { + return nil + } + + return err + } +} + +func testAccCheckStorageBucketLifecycleConditionState(expected *bool, b *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + actual := b.Lifecycle.Rule[0].Condition.IsLive + if expected == nil && b.Lifecycle.Rule[0].Condition.IsLive == nil { + return nil + } + if expected == nil { + return fmt.Errorf("expected condition isLive to be unset, instead got %t", *actual) + } + if actual == nil { + return fmt.Errorf("expected condition isLive to be %t, instead got nil (unset)", *expected) + } + if *expected != *actual { + return fmt.Errorf("expected condition isLive to be %t, instead got %t", *expected, 
*actual) + } + return nil + } +} + +func testAccCheckStorageBucketLifecycleConditionNoAge(expected *int64, b *storage.Bucket) resource.TestCheckFunc { + return func(s *terraform.State) error { + actual := b.Lifecycle.Rule[1].Condition.Age + if expected == nil && b.Lifecycle.Rule[1].Condition.Age == nil { + return nil + } + if expected == nil { + return fmt.Errorf("expected condition Age to be unset, instead got %d", *actual) + } + if actual == nil { + return fmt.Errorf("expected condition Age to be %d, instead got nil (unset)", *expected) + } + if *expected != *actual { + return fmt.Errorf("expected condition Age to be %d, instead got %d", *expected, *actual) + } + return nil + } +} + +func testAccStorageBucketDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + config := acctest.GoogleProviderConfig(t) + + for _, rs := range s.RootModule().Resources { + if rs.Type != "google_storage_bucket" { + continue + } + + _, err := config.NewStorageClient(config.UserAgent).Buckets.Get(rs.Primary.ID).Do() + if err == nil { + return fmt.Errorf("Bucket still exists") + } + } + + return nil + } +} + +func testAccStorageBucket_basic(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" +} +`, bucketName) +} + +func testAccStorageBucket_basicWithAutoclass(bucketName string, autoclass bool) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + autoclass { + enabled = %t + } +} +`, bucketName, autoclass) +} + +func testAccStorageBucket_basicWithAutoclass_update(bucketName string, autoclass bool) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + autoclass { + enabled = %t + terminal_storage_class = "ARCHIVE" + } +} +`, bucketName, autoclass) +} + +func testAccStorageBucket_requesterPays(bucketName 
string, pays bool) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + requester_pays = %t + force_destroy = true +} +`, bucketName, pays) +} + +func testAccStorageBucket_lowercaseLocation(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "eu" + force_destroy = true +} +`, bucketName) +} + +func testAccStorageBucket_dualLocation(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "ASIA" + force_destroy = true + custom_placement_config { + data_locations = ["ASIA-EAST1", "ASIA-SOUTHEAST1"] + } +} +`, bucketName) +} + +func testAccStorageBucket_dualLocation_lowercase(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "ASIA" + force_destroy = true + custom_placement_config { + data_locations = ["asia-east1", "asia-southeast1"] + } +} +`, bucketName) +} + +func testAccStorageBucket_dualLocation_rpo(bucketName string,rpo string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "ASIA" + force_destroy = true + custom_placement_config { + data_locations = ["ASIA-EAST1", "ASIA-SOUTHEAST1"] + } + rpo = "%s" +} +`, bucketName,rpo) +} + +func testAccStorageBucket_multiLocation_rpo(bucketName string,rpo string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "ASIA" + force_destroy = true + rpo = "%s" +} +`, bucketName,rpo) +} + +func testAccStorageBucket_customAttributes(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycle1(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + 
name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycle1Update(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 0 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycle2(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + num_newer_versions = 2 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycleVirtualFieldsUpdate1(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + no_age = false + days_since_noncurrent_time = 0 + send_days_since_noncurrent_time_if_zero = false + days_since_custom_time = 0 + send_days_since_custom_time_if_zero = false + num_newer_versions = 0 + send_num_newer_versions_if_zero = false + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + no_age = true + days_since_noncurrent_time = 0 + send_days_since_noncurrent_time_if_zero = true + days_since_custom_time = 0 + send_days_since_custom_time_if_zero = true + num_newer_versions = 0 + send_num_newer_versions_if_zero = true + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + send_days_since_noncurrent_time_if_zero = true + send_days_since_custom_time_if_zero = true + 
send_num_newer_versions_if_zero = true + } + } +} +`, bucketName) +} + +func testAccStorageBucket_customAttributes_withLifecycleVirtualFieldsUpdate2(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "EU" + force_destroy = "true" + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + no_age = false + days_since_noncurrent_time = 0 + send_days_since_noncurrent_time_if_zero = true + days_since_custom_time = 0 + send_days_since_custom_time_if_zero = true + num_newer_versions = 0 + send_num_newer_versions_if_zero = true + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + age = 10 + no_age = true + custom_time_before = "2022-09-01" + days_since_noncurrent_time = 0 + send_days_since_noncurrent_time_if_zero = false + days_since_custom_time = 0 + send_days_since_custom_time_if_zero = false + num_newer_versions = 0 + send_num_newer_versions_if_zero = false + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + custom_time_before = "2022-09-01" + send_days_since_noncurrent_time_if_zero = false + send_days_since_custom_time_if_zero = false + send_num_newer_versions_if_zero = false + } + } +} +`, bucketName) +} + +func testAccStorageBucket_storageClass(bucketName, storageClass, location string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + storage_class = "%s" + location = "%s" + force_destroy = true +} +`, bucketName, storageClass, location) +} + +func testGoogleStorageBucketsCors(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + cors { + origin = ["abc", "def"] + method = ["a1a"] + response_header = ["123", "456", "789"] + max_age_seconds = 10 + } + + cors { + origin = ["ghi", "jkl"] + method = ["z9z"] + response_header = ["000"] + max_age_seconds = 5 + } +} +`, bucketName) +} + +func 
testAccStorageBucket_defaultEventBasedHold(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + default_event_based_hold = true + force_destroy = true +} +`, bucketName) +} + +func testAccStorageBucket_forceDestroyWithVersioning(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = "true" + versioning { + enabled = "true" + } +} +`, bucketName) +} + +func testAccStorageBucket_enable_object_retention(bucketName string, enabled string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = "true" + enable_object_retention = "%s" +} +`, bucketName, enabled) +} + +func testAccStorageBucket_versioning(bucketName, enabled string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + versioning { + enabled = "%s" + } +} +`, bucketName, enabled) +} + +func testAccStorageBucket_versioning_empty(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true +} +`, bucketName) +} + +func testAccStorageBucket_logging(bucketName string, logBucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + logging { + log_bucket = "%s" + } +} +`, bucketName, logBucketName) +} + +func testAccStorageBucket_loggingWithPrefix(bucketName string, logBucketName string, prefix string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + logging { + log_bucket = "%s" + log_object_prefix = "%s" + } +} +`, bucketName, logBucketName, prefix) +} + +func testAccStorageBucket_lifecycleRulesMultiple(bucketName string) string { + 
return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + matches_storage_class = ["COLDLINE"] + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_storage_class = [] + age = 10 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + custom_time_before = "2019-01-01" + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + noncurrent_time_before = "2019-01-01" + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + created_before = "2019-01-01" + days_since_custom_time = 3 + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + num_newer_versions = 10 + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "ARCHIVE" + } + condition { + with_state = "ARCHIVED" + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_prefix = ["test"] + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_suffix = ["test"] + age = 2 + } + } + lifecycle_rule { + action { + type = "AbortIncompleteMultipartUpload" + } + condition { + age = 1 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_lifecycleRulesMultiple_update(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + matches_storage_class = ["COLDLINE"] + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_storage_class = [] + age = 10 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + custom_time_before = 
"2019-01-12" + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + noncurrent_time_before = "2019-01-12" + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + created_before = "2019-01-01" + days_since_custom_time = 5 + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "NEARLINE" + } + condition { + num_newer_versions = 10 + } + } + lifecycle_rule { + action { + type = "SetStorageClass" + storage_class = "ARCHIVE" + } + condition { + with_state = "ARCHIVED" + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_prefix = ["test"] + age = 2 + } + } + lifecycle_rule { + action { + type = "Delete" + } + condition { + matches_suffix = ["test"] + age = 2 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_lifecycleRule_emptyArchived(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "Delete" + } + + condition { + age = 10 + } + } +} +`, bucketName) +} + +func testAccStorageBucket_lifecycleRule_withStateArchived(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "Delete" + } + + condition { + age = 10 + with_state = "ARCHIVED" + } + } +} +`, bucketName) +} + +func testAccStorageBucket_lifecycleRule_withStateLive(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "Delete" + } + + condition { + age = 10 + with_state = "LIVE" + days_since_noncurrent_time = 5 + } + } + lifecycle_rule { + action { + type = "Delete" + } + + condition { + age = 2 + noncurrent_time_before = "2019-01-01" + } + } +} +`, bucketName) +} + +func 
testAccStorageBucket_lifecycleRule_withStateAny(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + lifecycle_rule { + action { + type = "Delete" + } + + condition { + age = 10 + with_state = "ANY" + } + } +} +`, bucketName) +} + +func testAccStorageBucket_labels(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + labels = { + my-label = "my-label-value" + } +} +`, bucketName) +} + +func testAccStorageBucket_uniformBucketAccessOnly(bucketName string, enabled bool) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + uniform_bucket_level_access = %t + force_destroy = true +} +`, bucketName, enabled) +} + +func testAccStorageBucket_publicAccessPrevention(bucketName string, prevention string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + public_access_prevention = "%s" + force_destroy = true +} +`, bucketName, prevention) +} + +func testAccStorageBucket_encryption(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_project" "acceptance" { + name = "tf-test-%{random_suffix}" + project_id = "tf-test-%{random_suffix}" + org_id = "%{organization}" + billing_account = "%{billing_account}" +} + +resource "google_project_service" "acceptance" { + project = google_project.acceptance.project_id + service = "cloudkms.googleapis.com" +} + +resource "google_kms_key_ring" "key_ring" { + name = "tf-test-%{random_suffix}" + project = google_project_service.acceptance.project + location = "us" +} + +resource "google_kms_crypto_key" "crypto_key" { + name = "tf-test-%{random_suffix}" + key_ring = google_kms_key_ring.key_ring.id + rotation_period = "1000000s" +} + +data "google_storage_project_service_account" "gcs_account" { +} + 
+resource "google_kms_crypto_key_iam_member" "iam" { + crypto_key_id = google_kms_crypto_key.crypto_key.id + role = "roles/cloudkms.cryptoKeyEncrypterDecrypter" + member = "serviceAccount:${data.google_storage_project_service_account.gcs_account.email_address}" +} + +resource "google_storage_bucket" "bucket" { + name = "tf-test-crypto-bucket-%{random_int}" + location = "US" + force_destroy = true + encryption { + default_kms_key_name = google_kms_crypto_key.crypto_key.id + } + + depends_on = [google_kms_crypto_key_iam_member.iam] +} +`, context) +} + +func testAccStorageBucket_updateLabels(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + labels = { + my-label = "my-updated-label-value" + a-new-label = "a-new-label-value" + } +} +`, bucketName) +} + +func testAccStorageBucket_website(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "website" { + name = "%s.gcp.tfacc.hashicorptest.com" + location = "US" + storage_class = "STANDARD" + force_destroy = true + + website { + main_page_suffix = "index.html" + not_found_page = "404.html" + } +} +`, bucketName) +} + +func testAccStorageBucket_retentionPolicy(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + + retention_policy { + retention_period = 10 + } +} +`, bucketName) +} + +func testAccStorageBucket_lockedRetentionPolicy(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + + retention_policy { + is_locked = true + retention_period = 10 + } +} +`, bucketName) +} + +func testAccStorageBucket_SoftDeletePolicy(bucketName string, duration int) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + + 
soft_delete_policy { + retention_duration_seconds = %d + } +} +`, bucketName, duration) +} + +func testAccStorageBucket_websiteNoAttributes(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "website" { + name = "%s.gcp.tfacc.hashicorptest.com" + location = "US" + storage_class = "STANDARD" + force_destroy = true + + website { + } +} +`, bucketName) +} + +func testAccStorageBucket_websiteRemoved(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "website" { + name = "%s.gcp.tfacc.hashicorptest.com" + location = "US" + storage_class = "STANDARD" + force_destroy = true +} +`, bucketName) +} + +func testAccStorageBucket_websiteOneAttribute(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "website" { + name = "%s.gcp.tfacc.hashicorptest.com" + location = "US" + storage_class = "STANDARD" + force_destroy = true + + website { + main_page_suffix = "index.html" + } +} +`, bucketName) +} + +func testAccStorageBucket_websiteOneAttributeUpdate(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "website" { + name = "%s.gcp.tfacc.hashicorptest.com" + location = "US" + storage_class = "STANDARD" + force_destroy = true + + website { + main_page_suffix = "default.html" + } +} +`, bucketName) +} + +func testAccStorageBucket_forceDestroy(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true +} +`, bucketName) +} + +func testAccStorageBucket_forceDestroyWithRetentionPolicy(bucketName string) string { + return fmt.Sprintf(` +resource "google_storage_bucket" "bucket" { + name = "%s" + location = "US" + force_destroy = true + + retention_policy { + retention_period = 3600 + } +} +`, bucketName) +} diff --git a/mmv1/third_party/terraform/services/storagetransfer/go/resource_storage_transfer_job.go.tmpl 
b/mmv1/third_party/terraform/services/storagetransfer/go/resource_storage_transfer_job.go.tmpl new file mode 100644 index 000000000000..a901c6885194 --- /dev/null +++ b/mmv1/third_party/terraform/services/storagetransfer/go/resource_storage_transfer_job.go.tmpl @@ -0,0 +1,1313 @@ +package storagetransfer + +import ( + "fmt" + "log" + "reflect" + "strings" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + "github.com/hashicorp/terraform-provider-google/google/verify" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/retry" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" + "google.golang.org/api/storagetransfer/v1" +) + +var ( + objectConditionsKeys = []string{ + "transfer_spec.0.object_conditions.0.min_time_elapsed_since_last_modification", + "transfer_spec.0.object_conditions.0.max_time_elapsed_since_last_modification", + "transfer_spec.0.object_conditions.0.include_prefixes", + "transfer_spec.0.object_conditions.0.exclude_prefixes", + "transfer_spec.0.object_conditions.0.last_modified_since", + "transfer_spec.0.object_conditions.0.last_modified_before", + } + + transferOptionsKeys = []string{ + "transfer_spec.0.transfer_options.0.overwrite_objects_already_existing_in_sink", + "transfer_spec.0.transfer_options.0.delete_objects_unique_in_sink", + "transfer_spec.0.transfer_options.0.delete_objects_from_source_after_transfer", + "transfer_spec.0.transfer_options.0.overwrite_when", + } + + transferSpecDataSourceKeys = []string{ + "transfer_spec.0.gcs_data_source", + "transfer_spec.0.aws_s3_data_source", + "transfer_spec.0.http_data_source", + "transfer_spec.0.azure_blob_storage_data_source", + "transfer_spec.0.posix_data_source", + } + transferSpecDataSinkKeys = []string{ + 
"transfer_spec.0.gcs_data_sink", + "transfer_spec.0.posix_data_sink", + } + awsS3AuthKeys = []string{ + "transfer_spec.0.aws_s3_data_source.0.aws_access_key", + "transfer_spec.0.aws_s3_data_source.0.role_arn", + } + {{- if ne $.TargetVersionName "ga" }} + azureOptionCredentials = []string{ + "transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials", + "transfer_spec.0.azure_blob_storage_data_source.0.credentials_secret", + } + {{- end }} +) + +func ResourceStorageTransferJob() *schema.Resource { + return &schema.Resource{ + Create: resourceStorageTransferJobCreate, + Read: resourceStorageTransferJobRead, + Update: resourceStorageTransferJobUpdate, + Delete: resourceStorageTransferJobDelete, + Importer: &schema.ResourceImporter{ + State: resourceStorageTransferJobStateImporter, + }, + + CustomizeDiff: customdiff.All( + tpgresource.DefaultProviderProject, + ), + + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + Description: `The name of the Transfer Job.`, + }, + "description": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringLenBetween(0, 1024), + Description: `Unique description to identify the Transfer Job.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `The project in which the resource belongs. 
If it is not provided, the provider project is used.`, + }, + "event_stream": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ConflictsWith: []string{"schedule"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies a unique name of the resource such as AWS SQS ARN in the form 'arn:aws:sqs:region:account_id:queue_name', or Pub/Sub subscription resource name in the form 'projects/{project}/subscriptions/{sub}'", + }, + "event_stream_start_time": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the date and time that Storage Transfer Service starts listening for events from this stream. If no start time is specified or start time is in the past, Storage Transfer Service starts listening immediately", + ValidateFunc: validation.IsRFC3339Time, + }, + "event_stream_expiration_time": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies the data and time at which Storage Transfer Service stops listening for events from this stream. After this time, any transfers in progress will complete, but no new transfers are initiated", + ValidateFunc: validation.IsRFC3339Time, + }, + }, + }, + }, + "transfer_spec": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "object_conditions": objectConditionsSchema(), + "transfer_options": transferOptionsSchema(), + "source_agent_pool_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `Specifies the agent pool name associated with the posix data source. When unspecified, the default name is used.`, + }, + "sink_agent_pool_name": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + Description: `Specifies the agent pool name associated with the posix data source. 
When unspecified, the default name is used.`, + }, + "gcs_data_sink": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: gcsDataSchema(), + ExactlyOneOf: transferSpecDataSinkKeys, + Description: `A Google Cloud Storage data sink.`, + }, + "posix_data_sink": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: posixDataSchema(), + ExactlyOneOf: transferSpecDataSinkKeys, + Description: `A POSIX filesystem data sink.`, + }, + "gcs_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: gcsDataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `A Google Cloud Storage data source.`, + }, + "aws_s3_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: awsS3DataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `An AWS S3 data source.`, + }, + "http_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: httpDataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `A HTTP URL data source.`, + }, + "posix_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: posixDataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `A POSIX filesystem data source.`, + }, + "azure_blob_storage_data_source": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: azureBlobStorageDataSchema(), + ExactlyOneOf: transferSpecDataSourceKeys, + Description: `An Azure Blob Storage data source.`, + }, + }, + }, + Description: `Transfer specification.`, + }, + "notification_config": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "pubsub_topic": { + Type: schema.TypeString, + Required: true, + Description: `The Topic.name of the Pub/Sub topic to which to publish notifications.`, + }, + "event_types": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + 
ValidateFunc: validation.StringInSlice([]string{"TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED"}, false), + }, + Description: `Event types for which a notification is desired. If empty, send notifications for all event types. The valid types are "TRANSFER_OPERATION_SUCCESS", "TRANSFER_OPERATION_FAILED", "TRANSFER_OPERATION_ABORTED".`, + }, + "payload_format": { + Type: schema.TypeString, + Required: true, + ValidateFunc: validation.StringInSlice([]string{"NONE", "JSON"}, false), + Description: `The desired format of the notification message payloads. One of "NONE" or "JSON".`, + }, + }, + }, + Description: `Notification configuration.`, + }, + "schedule": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + ConflictsWith: []string{"event_stream"}, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "schedule_start_date": { + Type: schema.TypeList, + Required: true, + MaxItems: 1, + Elem: dateObjectSchema(), + Description: `The first day the recurring transfer is scheduled to run. If schedule_start_date is in the past, the transfer will run for the first time on the following day.`, + }, + "schedule_end_date": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: dateObjectSchema(), + Description: `The last day the recurring transfer will be run. If schedule_end_date is the same as schedule_start_date, the transfer will be executed only once.`, + }, + "start_time_of_day": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: timeObjectSchema(), + DiffSuppressFunc: diffSuppressEmptyStartTimeOfDay, + Description: `The time in UTC at which the transfer will be scheduled to start in a day. Transfers may start later than this time. If not specified, recurring and one-time transfers that are scheduled to run today will run immediately; recurring transfers that are scheduled to run on a future date will start at approximately midnight UTC on that date. 
Note that when configuring a transfer with the Cloud Platform Console, the transfer's start time in a day is specified in your local timezone.`, + }, + "repeat_interval": { + Type: schema.TypeString, + ValidateFunc: verify.ValidateDuration(), + Optional: true, + Description: `Interval between the start of each scheduled transfer. If unspecified, the default value is 24 hours. This value may not be less than 1 hour. A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".`, + Default: "86400s", + }, + }, + }, + Description: `Schedule specification defining when the Transfer Job should be scheduled to start, end and what time to run.`, + }, + "status": { + Type: schema.TypeString, + Optional: true, + Default: "ENABLED", + ValidateFunc: validation.StringInSlice([]string{"ENABLED", "DISABLED", "DELETED"}, false), + Description: `Status of the job. Default: ENABLED. NOTE: The effect of the new job status takes place during a subsequent job run. For example, if you change the job status from ENABLED to DISABLED, and an operation spawned by the transfer is running, the status change would not affect the current operation.`, + }, + "creation_time": { + Type: schema.TypeString, + Computed: true, + Description: `When the Transfer Job was created.`, + }, + "last_modification_time": { + Type: schema.TypeString, + Computed: true, + Description: `When the Transfer Job was last modified.`, + }, + "deletion_time": { + Type: schema.TypeString, + Computed: true, + Description: `When the Transfer Job was deleted.`, + }, + }, + UseJSONNumber: true, + } +} + +func objectConditionsSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "min_time_elapsed_since_last_modification": { + Type: schema.TypeString, + ValidateFunc: verify.ValidateDuration(), + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Description: `A duration in 
seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".`, + }, + "max_time_elapsed_since_last_modification": { + Type: schema.TypeString, + ValidateFunc: verify.ValidateDuration(), + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Description: `A duration in seconds with up to nine fractional digits, terminated by 's'. Example: "3.5s".`, + }, + "include_prefixes": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Elem: &schema.Schema{ + MaxItems: 1000, + Type: schema.TypeString, + }, + Description: `If include_refixes is specified, objects that satisfy the object conditions must have names that start with one of the include_prefixes and that do not start with any of the exclude_prefixes. If include_prefixes is not specified, all objects except those that have names starting with one of the exclude_prefixes must satisfy the object conditions.`, + }, + "exclude_prefixes": { + Type: schema.TypeList, + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Elem: &schema.Schema{ + MaxItems: 1000, + Type: schema.TypeString, + }, + Description: `exclude_prefixes must follow the requirements described for include_prefixes.`, + }, + "last_modified_since": { + Type: schema.TypeString, + ValidateFunc: verify.ValidateRFC3339Date, + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Description: `If specified, only objects with a "last modification time" on or after this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. 
Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".`, + }, + "last_modified_before": { + Type: schema.TypeString, + ValidateFunc: verify.ValidateRFC3339Date, + Optional: true, + AtLeastOneOf: objectConditionsKeys, + Description: `If specified, only objects with a "last modification time" before this timestamp and objects that don't have a "last modification time" are transferred. A timestamp in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. Examples: "2014-10-02T15:01:23Z" and "2014-10-02T15:01:23.045123456Z".`, + }, + }, + }, + Description: `Only objects that satisfy these object conditions are included in the set of data source and data sink objects. Object conditions based on objects' last_modification_time do not exclude objects in a data sink.`, + } +} + +func transferOptionsSchema() *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "overwrite_objects_already_existing_in_sink": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: transferOptionsKeys, + Description: `Whether overwriting objects that already exist in the sink is allowed.`, + }, + "delete_objects_unique_in_sink": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: transferOptionsKeys, + ConflictsWith: []string{"transfer_spec.transfer_options.delete_objects_from_source_after_transfer"}, + Description: `Whether objects that exist only in the sink should be deleted. Note that this option and delete_objects_from_source_after_transfer are mutually exclusive.`, + }, + "delete_objects_from_source_after_transfer": { + Type: schema.TypeBool, + Optional: true, + AtLeastOneOf: transferOptionsKeys, + ConflictsWith: []string{"transfer_spec.transfer_options.delete_objects_unique_in_sink"}, + Description: `Whether objects should be deleted from the source after they are transferred to the sink. 
Note that this option and delete_objects_unique_in_sink are mutually exclusive.`, + }, + "overwrite_when": { + Type: schema.TypeString, + Optional: true, + AtLeastOneOf: transferOptionsKeys, + ValidateFunc: validation.StringInSlice([]string{"DIFFERENT", "NEVER", "ALWAYS"}, false), + Description: `When to overwrite objects that already exist in the sink. If not set, overwrite behavior is determined by overwriteObjectsAlreadyExistingInSink.`, + }, + }, + }, + Description: `Characteristics of how to treat files from datasource and sink during job. If the option delete_objects_unique_in_sink is true, object conditions based on objects' last_modification_time are ignored and do not exclude objects in a data source or a data sink.`, + } +} + +func timeObjectSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "hours": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 24), + Description: `Hours of day in 24 hour format. Should be from 0 to 23.`, + }, + "minutes": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 59), + Description: `Minutes of hour of day. Must be from 0 to 59.`, + }, + "seconds": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 60), + Description: `Seconds of minutes of the time. Must normally be from 0 to 59.`, + }, + "nanos": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 999999999), + Description: `Fractions of seconds in nanoseconds. Must be from 0 to 999,999,999.`, + }, + }, + } +} + +func dateObjectSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "year": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 9999), + Description: `Year of date. 
Must be from 1 to 9999.`, + }, + + "month": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(1, 12), + Description: `Month of year. Must be from 1 to 12.`, + }, + + "day": { + Type: schema.TypeInt, + Required: true, + ValidateFunc: validation.IntBetween(0, 31), + Description: `Day of month. Must be from 1 to 31 and valid for the year and month.`, + }, + }, + } +} + +func gcsDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bucket_name": { + Required: true, + Type: schema.TypeString, + Description: `Google Cloud Storage bucket name.`, + }, + "path": { + Optional: true, + Computed: true, + Type: schema.TypeString, + Description: `Google Cloud Storage path in bucket to transfer`, + }, + }, + } +} + +func awsS3DataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "bucket_name": { + Required: true, + Type: schema.TypeString, + Description: `S3 Bucket name.`, + }, + "path": { + Optional: true, + Type: schema.TypeString, + Description: `S3 Bucket path in bucket to transfer.`, + }, + "aws_access_key": { + Type: schema.TypeList, + Optional: true, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "access_key_id": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: `AWS Key ID.`, + }, + "secret_access_key": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: `AWS Secret Access Key.`, + }, + }, + }, + ExactlyOneOf: awsS3AuthKeys, + Description: `AWS credentials block.`, + }, + "role_arn": { + Type: schema.TypeString, + Optional: true, + ExactlyOneOf: awsS3AuthKeys, + Description: `The Amazon Resource Name (ARN) of the role to support temporary credentials via 'AssumeRoleWithWebIdentity'. For more information about ARNs, see [IAM ARNs](https://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html#identifiers-arns). 
When a role ARN is provided, Transfer Service fetches temporary credentials for the session using a 'AssumeRoleWithWebIdentity' call for the provided role using the [GoogleServiceAccount][] for this project.`, + }, + }, + } +} + +func httpDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "list_url": { + Type: schema.TypeString, + Required: true, + Description: `The URL that points to the file that stores the object list entries. This file must allow public access. Currently, only URLs with HTTP and HTTPS schemes are supported.`, + }, + }, + } +} + +func posixDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "root_directory": { + Type: schema.TypeString, + Required: true, + Description: `Root directory path to the filesystem.`, + }, + }, + } +} + +func azureBlobStorageDataSchema() *schema.Resource { + return &schema.Resource{ + Schema: map[string]*schema.Schema{ + "storage_account": { + Required: true, + Type: schema.TypeString, + Description: `The name of the Azure Storage account.`, + }, + "container": { + Required: true, + Type: schema.TypeString, + Description: `The container to transfer from the Azure Storage account.`, + }, + "path": { + Optional: true, + Computed: true, + Type: schema.TypeString, + Description: `Root path to transfer objects. Must be an empty string or full path name that ends with a '/'. This field is treated as an object prefix. 
As such, it should generally not begin with a '/'.`, + }, + "azure_credentials": { + Type: schema.TypeList, + {{- if ne $.TargetVersionName "ga" }} + Optional: true, + ExactlyOneOf: azureOptionCredentials, + {{- else }} + Required: true, + {{- end }} + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "sas_token": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: `Azure shared access signature.`, + }, + }, + }, + Description: ` Credentials used to authenticate API requests to Azure.`, + }, + {{- if ne $.TargetVersionName "ga" }} + "credentials_secret": { + Optional: true, + Type: schema.TypeString, + Description: `The Resource name of a secret in Secret Manager containing SAS Credentials in JSON form. Service Agent must have permissions to access secret. If credentials_secret is specified, do not specify azure_credentials.`, + ExactlyOneOf: azureOptionCredentials, + }, + {{- end }} + }, + } +} + +func diffSuppressEmptyStartTimeOfDay(k, old, new string, d *schema.ResourceData) bool { + return k == "schedule.0.start_time_of_day.#" && old == "1" && new == "0" +} + +func resourceStorageTransferJobCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + transferJob := &storagetransfer.TransferJob{ + Name: d.Get("name").(string), + Description: d.Get("description").(string), + ProjectId: project, + Status: d.Get("status").(string), + Schedule: expandTransferSchedules(d.Get("schedule").([]interface{})), + EventStream: expandEventStream(d.Get("event_stream").([]interface{})), + TransferSpec: expandTransferSpecs(d.Get("transfer_spec").([]interface{})), + NotificationConfig: expandTransferJobNotificationConfig(d.Get("notification_config").([]interface{})), + } + + var res 
*storagetransfer.TransferJob + + err = transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() error { + res, err = config.NewStorageTransferClient(userAgent).TransferJobs.Create(transferJob).Do() + return err + }, + }) + + if err != nil { + fmt.Printf("Error creating transfer job %v: %v", transferJob, err) + return err + } + + if err := d.Set("name", res.Name); err != nil { + return fmt.Errorf("Error setting name: %s", err) + } + + name := tpgresource.GetResourceNameFromSelfLink(res.Name) + d.SetId(fmt.Sprintf("%s/%s", project, name)) + + return resourceStorageTransferJobRead(d, meta) +} + +func resourceStorageTransferJobRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + name := d.Get("name").(string) + res, err := config.NewStorageTransferClient(userAgent).TransferJobs.Get(name, project).Do() + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("Transfer Job %q", name)) + } + + if res.Status == "DELETED" { + d.SetId("") + return nil + } + + if err := d.Set("project", res.ProjectId); err != nil { + return fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("description", res.Description); err != nil { + return fmt.Errorf("Error setting description: %s", err) + } + if err := d.Set("status", res.Status); err != nil { + return fmt.Errorf("Error setting status: %s", err) + } + if err := d.Set("last_modification_time", res.LastModificationTime); err != nil { + return fmt.Errorf("Error setting last_modification_time: %s", err) + } + if err := d.Set("creation_time", res.CreationTime); err != nil { + return fmt.Errorf("Error setting creation_time: %s", err) + } + if err := d.Set("deletion_time", res.DeletionTime); err != nil { + return fmt.Errorf("Error setting 
deletion_time: %s", err) + } + + err = d.Set("schedule", flattenTransferSchedule(res.Schedule)) + if err != nil { + return err + } + + err = d.Set("event_stream", flattenTransferEventStream(res.EventStream)) + if err != nil { + return err + } + + err = d.Set("transfer_spec", flattenTransferSpec(res.TransferSpec, d)) + if err != nil { + return err + } + + err = d.Set("notification_config", flattenTransferJobNotificationConfig(res.NotificationConfig)) + if err != nil { + return err + } + + return nil +} + +func resourceStorageTransferJobUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + transferJob := &storagetransfer.TransferJob{} + fieldMask := []string{} + + if d.HasChange("event_stream") { + fieldMask = append(fieldMask, "event_stream") + if v, ok := d.GetOk("event_stream"); ok { + transferJob.EventStream = expandEventStream(v.([]interface{})) + } + } + + if d.HasChange("description") { + fieldMask = append(fieldMask, "description") + if v, ok := d.GetOk("description"); ok { + transferJob.Description = v.(string) + } + } + + if d.HasChange("status") { + fieldMask = append(fieldMask, "status") + if v, ok := d.GetOk("status"); ok { + transferJob.Status = v.(string) + } + } + + if d.HasChange("schedule") { + fieldMask = append(fieldMask, "schedule") + if v, ok := d.GetOk("schedule"); ok { + transferJob.Schedule = expandTransferSchedules(v.([]interface{})) + } + } + + if d.HasChange("transfer_spec") { + fieldMask = append(fieldMask, "transfer_spec") + if v, ok := d.GetOk("transfer_spec"); ok { + transferJob.TransferSpec = expandTransferSpecs(v.([]interface{})) + } + } + + if d.HasChange("notification_config") { + fieldMask = append(fieldMask, "notification_config") + if v, ok := d.GetOk("notification_config"); ok { + 
transferJob.NotificationConfig = expandTransferJobNotificationConfig(v.([]interface{})) + } else { + transferJob.NotificationConfig = nil + } + } + + if len(fieldMask) == 0 { + return nil + } + + updateRequest := &storagetransfer.UpdateTransferJobRequest{ + ProjectId: project, + TransferJob: transferJob, + } + + updateRequest.UpdateTransferJobFieldMask = strings.Join(fieldMask, ",") + + res, err := config.NewStorageTransferClient(userAgent).TransferJobs.Patch(d.Get("name").(string), updateRequest).Do() + if err != nil { + return err + } + + log.Printf("[DEBUG] Patched transfer job: %v\n\n", res.Name) + return nil +} + +func resourceStorageTransferJobDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + transferJobName := d.Get("name").(string) + + transferJob := &storagetransfer.TransferJob{ + Status: "DELETED", + } + + fieldMask := "status" + + updateRequest := &storagetransfer.UpdateTransferJobRequest{ + ProjectId: project, + TransferJob: transferJob, + } + + updateRequest.UpdateTransferJobFieldMask = fieldMask + + // Update transfer job with status set to DELETE + log.Printf("[DEBUG] Setting status to DELETE for: %v\n\n", transferJobName) + err = retry.Retry(1*time.Minute, func() *retry.RetryError { + _, err := config.NewStorageTransferClient(userAgent).TransferJobs.Patch(transferJobName, updateRequest).Do() + if err != nil { + return retry.RetryableError(err) + } + + return nil + }) + + if err != nil { + fmt.Printf("Error deleting transfer job %v: %v\n\n", transferJob, err) + return err + } + + log.Printf("[DEBUG] Deleted transfer job %v\n\n", transferJob) + + return nil +} + +func resourceStorageTransferJobStateImporter(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + parts := 
strings.Split(d.Id(), "/") + switch len(parts) { + case 2: + if err := d.Set("project", parts[0]); err != nil { + return nil, fmt.Errorf("Error setting project: %s", err) + } + if err := d.Set("name", fmt.Sprintf("transferJobs/%s", parts[1])); err != nil { + return nil, fmt.Errorf("Error setting name: %s", err) + } + default: + return nil, fmt.Errorf("Invalid transfer job specifier. Expecting {projectId}/{transferJobName}") + } + return []*schema.ResourceData{d}, nil +} + +func expandDates(dates []interface{}) *storagetransfer.Date { + if len(dates) == 0 || dates[0] == nil { + return nil + } + + dateMap := dates[0].(map[string]interface{}) + date := &storagetransfer.Date{} + if v, ok := dateMap["day"]; ok { + date.Day = int64(v.(int)) + } + + if v, ok := dateMap["month"]; ok { + date.Month = int64(v.(int)) + } + + if v, ok := dateMap["year"]; ok { + date.Year = int64(v.(int)) + } + + log.Printf("[DEBUG] not nil date: %#v", dates) + return date +} + +func flattenDate(date *storagetransfer.Date) []map[string]interface{} { + data := map[string]interface{}{ + "year": date.Year, + "month": date.Month, + "day": date.Day, + } + + return []map[string]interface{}{data} +} + +func expandTimeOfDays(times []interface{}) *storagetransfer.TimeOfDay { + if len(times) == 0 || times[0] == nil { + return nil + } + + timeMap := times[0].(map[string]interface{}) + time := &storagetransfer.TimeOfDay{} + if v, ok := timeMap["hours"]; ok { + time.Hours = int64(v.(int)) + } + + if v, ok := timeMap["minutes"]; ok { + time.Minutes = int64(v.(int)) + } + + if v, ok := timeMap["seconds"]; ok { + time.Seconds = int64(v.(int)) + } + + if v, ok := timeMap["nanos"]; ok { + time.Nanos = int64(v.(int)) + } + + return time +} + +func flattenTimeOfDay(timeOfDay *storagetransfer.TimeOfDay) []map[string]interface{} { + data := map[string]interface{}{ + "hours": timeOfDay.Hours, + "minutes": timeOfDay.Minutes, + "seconds": timeOfDay.Seconds, + "nanos": timeOfDay.Nanos, + } + + return 
[]map[string]interface{}{data} +} + +func expandTransferSchedules(transferSchedules []interface{}) *storagetransfer.Schedule { + if len(transferSchedules) == 0 || transferSchedules[0] == nil { + return nil + } + + schedule := transferSchedules[0].(map[string]interface{}) + return &storagetransfer.Schedule{ + ScheduleStartDate: expandDates(schedule["schedule_start_date"].([]interface{})), + ScheduleEndDate: expandDates(schedule["schedule_end_date"].([]interface{})), + StartTimeOfDay: expandTimeOfDays(schedule["start_time_of_day"].([]interface{})), + RepeatInterval: schedule["repeat_interval"].(string), + } +} + +func flattenTransferSchedule(transferSchedule *storagetransfer.Schedule) []map[string]interface{} { + if transferSchedule == nil || reflect.DeepEqual(transferSchedule, &storagetransfer.Schedule{}) { + return nil + } + + data := map[string]interface{}{ + "schedule_start_date": flattenDate(transferSchedule.ScheduleStartDate), + } + + if transferSchedule.ScheduleEndDate != nil { + data["schedule_end_date"] = flattenDate(transferSchedule.ScheduleEndDate) + } + + if transferSchedule.StartTimeOfDay != nil { + data["start_time_of_day"] = flattenTimeOfDay(transferSchedule.StartTimeOfDay) + } + + if transferSchedule.RepeatInterval != "" { + data["repeat_interval"] = transferSchedule.RepeatInterval + } + + return []map[string]interface{}{data} +} + +func expandEventStream(e []interface{}) *storagetransfer.EventStream { + if len(e) == 0 || e[0] == nil { + return nil + } + + eventStream := e[0].(map[string]interface{}) + return &storagetransfer.EventStream{ + Name: eventStream["name"].(string), + EventStreamStartTime: eventStream["event_stream_start_time"].(string), + EventStreamExpirationTime: eventStream["event_stream_expiration_time"].(string), + } +} + +func flattenTransferEventStream(eventStream *storagetransfer.EventStream) []map[string]interface{} { + if eventStream == nil || reflect.DeepEqual(eventStream, &storagetransfer.EventStream{}) { + return nil + } + + 
data := map[string]interface{}{ + "name": eventStream.Name, + } + + if eventStream.EventStreamStartTime != "" { + data["event_stream_start_time"] = eventStream.EventStreamStartTime + } + + if eventStream.EventStreamExpirationTime != "" { + data["event_stream_expiration_time"] = eventStream.EventStreamExpirationTime + } + + return []map[string]interface{}{data} +} + +func expandGcsData(gcsDatas []interface{}) *storagetransfer.GcsData { + if len(gcsDatas) == 0 || gcsDatas[0] == nil { + return nil + } + + gcsData := gcsDatas[0].(map[string]interface{}) + var apiData = &storagetransfer.GcsData{ + BucketName: gcsData["bucket_name"].(string), + } + var path = gcsData["path"].(string) + apiData.Path = path + + return apiData +} + +func flattenGcsData(gcsData *storagetransfer.GcsData) []map[string]interface{} { + data := map[string]interface{}{ + "bucket_name": gcsData.BucketName, + "path": gcsData.Path, + } + return []map[string]interface{}{data} +} + +func expandAwsAccessKeys(awsAccessKeys []interface{}) *storagetransfer.AwsAccessKey { + if len(awsAccessKeys) == 0 || awsAccessKeys[0] == nil { + return nil + } + + awsAccessKey := awsAccessKeys[0].(map[string]interface{}) + return &storagetransfer.AwsAccessKey{ + AccessKeyId: awsAccessKey["access_key_id"].(string), + SecretAccessKey: awsAccessKey["secret_access_key"].(string), + } +} + +func flattenAwsAccessKeys(d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "access_key_id": d.Get("transfer_spec.0.aws_s3_data_source.0.aws_access_key.0.access_key_id"), + "secret_access_key": d.Get("transfer_spec.0.aws_s3_data_source.0.aws_access_key.0.secret_access_key"), + } + + return []map[string]interface{}{data} +} + +func expandAwsS3Data(awsS3Datas []interface{}) *storagetransfer.AwsS3Data { + if len(awsS3Datas) == 0 || awsS3Datas[0] == nil { + return nil + } + + awsS3Data := awsS3Datas[0].(map[string]interface{}) + return &storagetransfer.AwsS3Data{ + BucketName: 
awsS3Data["bucket_name"].(string), + AwsAccessKey: expandAwsAccessKeys(awsS3Data["aws_access_key"].([]interface{})), + RoleArn: awsS3Data["role_arn"].(string), + Path: awsS3Data["path"].(string), + } +} + +func flattenAwsS3Data(awsS3Data *storagetransfer.AwsS3Data, d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "bucket_name": awsS3Data.BucketName, + "path": awsS3Data.Path, + "role_arn": awsS3Data.RoleArn, + } + if _, exist := d.GetOkExists("transfer_spec.0.aws_s3_data_source.0.aws_access_key"); exist{ + data["aws_access_key"] = flattenAwsAccessKeys(d) + } + return []map[string]interface{}{data} +} + +func expandHttpData(httpDatas []interface{}) *storagetransfer.HttpData { + if len(httpDatas) == 0 || httpDatas[0] == nil { + return nil + } + + httpData := httpDatas[0].(map[string]interface{}) + return &storagetransfer.HttpData{ + ListUrl: httpData["list_url"].(string), + } +} + +func flattenHttpData(httpData *storagetransfer.HttpData) []map[string]interface{} { + data := map[string]interface{}{ + "list_url": httpData.ListUrl, + } + + return []map[string]interface{}{data} +} + +func expandPosixData(posixDatas []interface{}) *storagetransfer.PosixFilesystem { + if len(posixDatas) == 0 || posixDatas[0] == nil { + return nil + } + + posixData := posixDatas[0].(map[string]interface{}) + return &storagetransfer.PosixFilesystem{ + RootDirectory: posixData["root_directory"].(string), + } +} + +func flattenPosixData(posixData *storagetransfer.PosixFilesystem) []map[string]interface{} { + data := map[string]interface{}{ + "root_directory": posixData.RootDirectory, + } + + return []map[string]interface{}{data} +} + +func expandAzureCredentials(azureCredentials []interface{}) *storagetransfer.AzureCredentials { + if len(azureCredentials) == 0 || azureCredentials[0] == nil { + return nil + } + + azureCredential := azureCredentials[0].(map[string]interface{}) + return &storagetransfer.AzureCredentials{ + SasToken: 
azureCredential["sas_token"].(string), + } +} + +func flattenAzureCredentials(d *schema.ResourceData) []map[string]interface{} { + {{- if ne $.TargetVersionName "ga" }} + if d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token") == "" { + return []map[string]interface{}{} + } + {{- end }} + data := map[string]interface{}{ + "sas_token": d.Get("transfer_spec.0.azure_blob_storage_data_source.0.azure_credentials.0.sas_token"), + } + + return []map[string]interface{}{data} +} + +func expandAzureBlobStorageData(azureBlobStorageDatas []interface{}) *storagetransfer.AzureBlobStorageData { + if len(azureBlobStorageDatas) == 0 || azureBlobStorageDatas[0] == nil { + return nil + } + + azureBlobStorageData := azureBlobStorageDatas[0].(map[string]interface{}) + + return &storagetransfer.AzureBlobStorageData{ + Container: azureBlobStorageData["container"].(string), + Path: azureBlobStorageData["path"].(string), + StorageAccount: azureBlobStorageData["storage_account"].(string), + AzureCredentials: expandAzureCredentials(azureBlobStorageData["azure_credentials"].([]interface{})), + {{- if ne $.TargetVersionName "ga" }} + CredentialsSecret: azureBlobStorageData["credentials_secret"].(string), + {{- end }} + } +} + +func flattenAzureBlobStorageData(azureBlobStorageData *storagetransfer.AzureBlobStorageData, d *schema.ResourceData) []map[string]interface{} { + data := map[string]interface{}{ + "container": azureBlobStorageData.Container, + "path": azureBlobStorageData.Path, + "storage_account": azureBlobStorageData.StorageAccount, + "azure_credentials": flattenAzureCredentials(d), + {{- if ne $.TargetVersionName "ga" }} + "credentials_secret": azureBlobStorageData.CredentialsSecret, + {{- end }} + } + + return []map[string]interface{}{data} +} + +func expandObjectConditions(conditions []interface{}) *storagetransfer.ObjectConditions { + if len(conditions) == 0 || conditions[0] == nil { + return nil + } + + condition := 
conditions[0].(map[string]interface{}) + return &storagetransfer.ObjectConditions{ + ExcludePrefixes: tpgresource.ConvertStringArr(condition["exclude_prefixes"].([]interface{})), + IncludePrefixes: tpgresource.ConvertStringArr(condition["include_prefixes"].([]interface{})), + MaxTimeElapsedSinceLastModification: condition["max_time_elapsed_since_last_modification"].(string), + MinTimeElapsedSinceLastModification: condition["min_time_elapsed_since_last_modification"].(string), + LastModifiedSince: condition["last_modified_since"].(string), + LastModifiedBefore: condition["last_modified_before"].(string), + } +} + +func flattenObjectCondition(condition *storagetransfer.ObjectConditions) []map[string]interface{} { + data := map[string]interface{}{ + "exclude_prefixes": condition.ExcludePrefixes, + "include_prefixes": condition.IncludePrefixes, + "max_time_elapsed_since_last_modification": condition.MaxTimeElapsedSinceLastModification, + "min_time_elapsed_since_last_modification": condition.MinTimeElapsedSinceLastModification, + "last_modified_since": condition.LastModifiedSince, + "last_modified_before": condition.LastModifiedBefore, + } + return []map[string]interface{}{data} +} + +func expandTransferOptions(options []interface{}) *storagetransfer.TransferOptions { + if len(options) == 0 || options[0] == nil { + return nil + } + + option := options[0].(map[string]interface{}) + return &storagetransfer.TransferOptions{ + DeleteObjectsFromSourceAfterTransfer: option["delete_objects_from_source_after_transfer"].(bool), + DeleteObjectsUniqueInSink: option["delete_objects_unique_in_sink"].(bool), + OverwriteObjectsAlreadyExistingInSink: option["overwrite_objects_already_existing_in_sink"].(bool), + OverwriteWhen: option["overwrite_when"].(string), + } +} + +func flattenTransferOption(option *storagetransfer.TransferOptions) []map[string]interface{} { + data := map[string]interface{}{ + "delete_objects_from_source_after_transfer": 
option.DeleteObjectsFromSourceAfterTransfer, + "delete_objects_unique_in_sink": option.DeleteObjectsUniqueInSink, + "overwrite_objects_already_existing_in_sink": option.OverwriteObjectsAlreadyExistingInSink, + "overwrite_when": option.OverwriteWhen, + } + + return []map[string]interface{}{data} +} + +func expandTransferSpecs(transferSpecs []interface{}) *storagetransfer.TransferSpec { + if len(transferSpecs) == 0 || transferSpecs[0] == nil { + return nil + } + + transferSpec := transferSpecs[0].(map[string]interface{}) + return &storagetransfer.TransferSpec{ + SourceAgentPoolName: transferSpec["source_agent_pool_name"].(string), + SinkAgentPoolName: transferSpec["sink_agent_pool_name"].(string), + GcsDataSink: expandGcsData(transferSpec["gcs_data_sink"].([]interface{})), + PosixDataSink: expandPosixData(transferSpec["posix_data_sink"].([]interface{})), + ObjectConditions: expandObjectConditions(transferSpec["object_conditions"].([]interface{})), + TransferOptions: expandTransferOptions(transferSpec["transfer_options"].([]interface{})), + GcsDataSource: expandGcsData(transferSpec["gcs_data_source"].([]interface{})), + AwsS3DataSource: expandAwsS3Data(transferSpec["aws_s3_data_source"].([]interface{})), + HttpDataSource: expandHttpData(transferSpec["http_data_source"].([]interface{})), + AzureBlobStorageDataSource: expandAzureBlobStorageData(transferSpec["azure_blob_storage_data_source"].([]interface{})), + PosixDataSource: expandPosixData(transferSpec["posix_data_source"].([]interface{})), + } +} + +func flattenTransferSpec(transferSpec *storagetransfer.TransferSpec, d *schema.ResourceData) []map[string]interface{} { + + data := map[string]interface{}{} + + data["sink_agent_pool_name"] = transferSpec.SinkAgentPoolName + data["source_agent_pool_name"] = transferSpec.SourceAgentPoolName + + if transferSpec.GcsDataSink != nil { + data["gcs_data_sink"] = flattenGcsData(transferSpec.GcsDataSink) + } + if transferSpec.PosixDataSink != nil { + data["posix_data_sink"] = 
flattenPosixData(transferSpec.PosixDataSink) + } + + if transferSpec.ObjectConditions != nil { + data["object_conditions"] = flattenObjectCondition(transferSpec.ObjectConditions) + } + if transferSpec.TransferOptions != nil && + (usingPosix(transferSpec) == false || + (usingPosix(transferSpec) == true && reflect.DeepEqual(transferSpec.TransferOptions, &storagetransfer.TransferOptions{}) == false)) { + data["transfer_options"] = flattenTransferOption(transferSpec.TransferOptions) + } + if transferSpec.GcsDataSource != nil { + data["gcs_data_source"] = flattenGcsData(transferSpec.GcsDataSource) + } else if transferSpec.AwsS3DataSource != nil { + data["aws_s3_data_source"] = flattenAwsS3Data(transferSpec.AwsS3DataSource, d) + } else if transferSpec.HttpDataSource != nil { + data["http_data_source"] = flattenHttpData(transferSpec.HttpDataSource) + } else if transferSpec.AzureBlobStorageDataSource != nil { + data["azure_blob_storage_data_source"] = flattenAzureBlobStorageData(transferSpec.AzureBlobStorageDataSource, d) + } else if transferSpec.PosixDataSource != nil { + data["posix_data_source"] = flattenPosixData(transferSpec.PosixDataSource) + } + + return []map[string]interface{}{data} +} + +func usingPosix(transferSpec *storagetransfer.TransferSpec) bool { + return transferSpec.PosixDataSource != nil || transferSpec.PosixDataSink != nil +} + +func expandTransferJobNotificationConfig(notificationConfigs []interface{}) *storagetransfer.NotificationConfig { + if len(notificationConfigs) == 0 || notificationConfigs[0] == nil { + return nil + } + + notificationConfig := notificationConfigs[0].(map[string]interface{}) + var apiData = &storagetransfer.NotificationConfig{ + PayloadFormat: notificationConfig["payload_format"].(string), + PubsubTopic: notificationConfig["pubsub_topic"].(string), + } + + if notificationConfig["event_types"] != nil { + apiData.EventTypes = tpgresource.ConvertStringArr(notificationConfig["event_types"].(*schema.Set).List()) + } + + 
log.Printf("[DEBUG] apiData: %v\n\n", apiData) + return apiData +} + +func flattenTransferJobNotificationConfig(notificationConfig *storagetransfer.NotificationConfig) []map[string]interface{} { + if notificationConfig == nil { + return nil + } + + data := map[string]interface{}{ + "payload_format": notificationConfig.PayloadFormat, + "pubsub_topic": notificationConfig.PubsubTopic, + } + + if notificationConfig.EventTypes != nil { + data["event_types"] = tpgresource.ConvertStringArrToInterface(notificationConfig.EventTypes) + } + + return []map[string]interface{}{data} +} diff --git a/mmv1/third_party/terraform/services/tags/go/resource_tags_location_tag_bindings.go.tmpl b/mmv1/third_party/terraform/services/tags/go/resource_tags_location_tag_bindings.go.tmpl new file mode 100644 index 000000000000..6a68145bbe7a --- /dev/null +++ b/mmv1/third_party/terraform/services/tags/go/resource_tags_location_tag_bindings.go.tmpl @@ -0,0 +1,388 @@ +package tags + +import ( + "fmt" + "log" + "reflect" + "strings" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ResourceTagsLocationTagBinding() *schema.Resource { + return &schema.Resource{ + Create: resourceTagsLocationTagBindingCreate, + Read: resourceTagsLocationTagBindingRead, + Delete: resourceTagsLocationTagBindingDelete, + + Importer: &schema.ResourceImporter{ + State: resourceTagsLocationTagBindingImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(20 * time.Minute), + Delete: schema.DefaultTimeout(20 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "parent": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The full resource name of the resource the TagValue is bound to. E.g. 
//cloudresourcemanager.googleapis.com/projects/123`, + }, + "tag_value": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `The TagValue of the TagBinding. Must be of the form tagValues/456.`, + }, + "location": { + Type: schema.TypeString, + ForceNew: true, + Optional: true, + Description: `The geographic location where the transfer config should reside. +Examples: US, EU, asia-northeast1. The default value is US.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `The generated id for the TagBinding. This is a string of the form: 'tagBindings/{full-resource-name}/{tag-value-name}'`, + }, + }, + UseJSONNumber: true, + } +} + +func resourceTagsLocationTagBindingCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + parentProp, err := expandNestedTagsLocationTagBindingParent(d.Get("parent"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("parent"); !tpgresource.IsEmptyValue(reflect.ValueOf(parentProp)) && (ok || !reflect.DeepEqual(v, parentProp)) { + obj["parent"] = parentProp + } + tagValueProp, err := expandNestedTagsLocationTagBindingTagValue(d.Get("tag_value"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("tag_value"); !tpgresource.IsEmptyValue(reflect.ValueOf(tagValueProp)) && (ok || !reflect.DeepEqual(v, tagValueProp)) { + obj["tagValue"] = tagValueProp + } + + lockName, err := tpgresource.ReplaceVars(d, config, "tagBindings/{{"{{"}}parent{{"}}"}}") + if err != nil { + return err + } + transport_tpg.MutexStore.Lock(lockName) + defer transport_tpg.MutexStore.Unlock(lockName) + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}TagsLocationBasePath{{"}}"}}tagBindings") + log.Printf("url for TagsLocation: %s", url) + if err != nil { + return err 
+ } + + log.Printf("[DEBUG] Creating new LocationTagBinding: %#v", obj) + + billingProject := "" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "POST", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutCreate), + }) + if err != nil { + return fmt.Errorf("Error creating LocationTagBinding: %s", err) + } + + // Use the resource in the operation response to populate + // identity fields and d.Id() before read + + var opRes map[string]interface{} + err = TagsLocationOperationWaitTimeWithResponse( + config, res, &opRes, "Creating LocationTagBinding", userAgent, + d.Timeout(schema.TimeoutCreate)) + + if err != nil { + d.SetId("") + return fmt.Errorf("Error waiting to create LocationTagBinding: %s", err) + } + + if _, ok := opRes["tagBindings"]; ok { + opRes, err = flattenNestedTagsLocationTagBinding(d, meta, opRes) + if err != nil { + return fmt.Errorf("Error getting nested object from operation response: %s", err) + } + if opRes == nil { + // Object isn't there any more - remove it from the state. 
+ d.SetId("") + return fmt.Errorf("Error decoding response from operation, could not find nested object") + } + } + if err := d.Set("name", flattenNestedTagsLocationTagBindingName(opRes["name"], d, config)); err != nil { + return err + } + + id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}location{{"}}"}}/{{"{{"}}name{{"}}"}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating LocationTagBinding %q: %#v", d.Id(), res) + + return resourceTagsLocationTagBindingRead(d, meta) +} + +func resourceTagsLocationTagBindingRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}TagsLocationBasePath{{"}}"}}tagBindings/?parent={{"{{"}}parent{{"}}"}}&pageSize=300") + if err != nil { + return err + } + + billingProject := "" + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("TagsLocationTagBinding %q", d.Id())) + } + log.Printf("[DEBUG] Skipping res with name for import = %#v,)", res) + + p, ok := res["tagBindings"] + if !ok || p == nil { + return nil + } + pView := p.([]interface{}) + + //if there are more than 300 bindings - handling pagination over here + if pageToken, ok := res["nextPageToken"].(string); ok { + for pageToken != "" { + url, err = transport_tpg.AddQueryParams(url, map[string]string{"pageToken": fmt.Sprintf("%s", res["nextPageToken"])}) + if err != nil { + return 
transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("TagsLocationTagBinding %q", d.Id())) + } + resp, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, fmt.Sprintf("TagsLocationTagBinding %q", d.Id())) + } + if resp == nil { + d.SetId("") + return nil + } + v, ok := resp["tagBindings"] + if !ok || v == nil { + return nil + } + pView = append(pView, v.([]interface{})...) + if token, ok := res["nextPageToken"]; ok { + pageToken = token.(string) + } else { + pageToken = "" + } + } + } + + newMap := make(map[string]interface{}, 1) + newMap["tagBindings"] = pView + + res, err = flattenNestedTagsLocationTagBinding(d, meta, newMap) + if err != nil { + return err + } + + if err := d.Set("name", flattenNestedTagsLocationTagBindingName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading LocationTagBinding: %s", err) + } + if err := d.Set("parent", flattenNestedTagsLocationTagBindingParent(res["parent"], d, config)); err != nil { + return fmt.Errorf("Error reading LocationTagBinding: %s", err) + } + if err := d.Set("tag_value", flattenNestedTagsLocationTagBindingTagValue(res["tagValue"], d, config)); err != nil { + return fmt.Errorf("Error reading LocationTagBinding: %s", err) + } + + return nil +} + +func resourceTagsLocationTagBindingDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + billingProject := "" + + lockName, err := tpgresource.ReplaceVars(d, config, "tagBindings/{{"{{"}}parent{{"}}"}}") + if err != nil { + return err + } + transport_tpg.MutexStore.Lock(lockName) + defer transport_tpg.MutexStore.Unlock(lockName) + + url, err := tpgresource.ReplaceVars(d, config, 
"{{"{{"}}TagsLocationBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting LocationTagBinding %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := tpgresource.GetBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: billingProject, + RawURL: url, + UserAgent: userAgent, + Body: obj, + Timeout: d.Timeout(schema.TimeoutDelete), + }) + if err != nil { + return transport_tpg.HandleNotFoundError(err, d, "LocationTagBinding") + } + + err = TagsLocationOperationWaitTime( + config, res, "Deleting LocationTagBinding", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting LocationTagBinding %q: %#v", d.Id(), res) + return nil +} + +func resourceTagsLocationTagBindingImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + config := meta.(*transport_tpg.Config) + if err := tpgresource.ParseImportId([]string{"(?P[^/]+)/tagBindings/(?P[^/]+)/tagValues/(?P[^/]+)"}, d, config); err != nil { + return nil, err + } + + parent := d.Get("parent").(string) + parentProper := strings.ReplaceAll(parent, "%2F", "/") + d.Set("parent", parentProper) + d.Set("name", fmt.Sprintf("tagBindings/%s/tagValues/%s", parent, d.Get("tag_value").(string))) + id, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}location{{"}}"}}/{{"{{"}}name{{"}}"}}") + if err != nil { + return nil, fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + return []*schema.ResourceData{d}, nil +} + +func flattenNestedTagsLocationTagBindingName(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func flattenNestedTagsLocationTagBindingParent(v interface{}, d *schema.ResourceData, config 
*transport_tpg.Config) interface{} { + return v +} + +func flattenNestedTagsLocationTagBindingTagValue(v interface{}, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + return v +} + +func expandNestedTagsLocationTagBindingParent(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandNestedTagsLocationTagBindingTagValue(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func flattenNestedTagsLocationTagBinding(d *schema.ResourceData, meta interface{}, res map[string]interface{}) (map[string]interface{}, error) { + var v interface{} + var ok bool + + v, ok = res["tagBindings"] + if !ok || v == nil { + return nil, nil + } + + switch v.(type) { + case []interface{}: + log.Printf("[DEBUG] Hey it's in break = %#v,)", v) + break + case map[string]interface{}: + // Construct list out of single nested resource + v = []interface{}{v} + default: + return nil, fmt.Errorf("expected list or map for value tagBindings. Actual value: %v", v) + } + + _, item, err := resourceTagsLocationTagBindingFindNestedObjectInList(d, meta, v.([]interface{})) + if err != nil { + return nil, err + } + return item, nil +} + +func resourceTagsLocationTagBindingFindNestedObjectInList(d *schema.ResourceData, meta interface{}, items []interface{}) (index int, item map[string]interface{}, err error) { + expectedName := d.Get("name") + expectedFlattenedName := flattenNestedTagsLocationTagBindingName(expectedName, d, meta.(*transport_tpg.Config)) + + // Search list for this resource. 
+ for idx, itemRaw := range items { + if itemRaw == nil { + continue + } + + item := itemRaw.(map[string]interface{}) + itemName := flattenNestedTagsLocationTagBindingName(item["name"], d, meta.(*transport_tpg.Config)) + // IsEmptyValue check so that if one is nil and the other is "", that's considered a match + if !(tpgresource.IsEmptyValue(reflect.ValueOf(itemName)) && tpgresource.IsEmptyValue(reflect.ValueOf(expectedFlattenedName))) && !reflect.DeepEqual(itemName, expectedFlattenedName) { + log.Printf("[DEBUG] Skipping item with name= %#v, looking for %#v)", itemName, expectedFlattenedName) + continue + } + return idx, item, nil + } + return -1, nil, nil +} diff --git a/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types.go.tmpl b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types.go.tmpl new file mode 100644 index 000000000000..a0bc2d1cde43 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types.go.tmpl @@ -0,0 +1,95 @@ +package tpuv2 + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "log" + "sort" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceTpuV2AcceleratorTypes() *schema.Resource { + return &schema.Resource{ + Read: dataSourceTpuV2AcceleratorTypesRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "zone": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "types": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func dataSourceTpuV2AcceleratorTypesRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := 
tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + zone, err := tpgresource.GetZone(d, config) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}TpuV2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}zone{{"}}"}}/acceleratorTypes") + if err != nil { + return err + } + + typesRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuV2AcceleratorTypes) + if err != nil { + return fmt.Errorf("error listing TPU v2 accelerator types: %s", err) + } + + types := make([]string, len(typesRaw)) + for i, typeRaw := range typesRaw { + types[i] = typeRaw.(string) + } + sort.Strings(types) + + log.Printf("[DEBUG] Received Google TPU v2 accelerator types: %q", types) + + if err := d.Set("types", types); err != nil { + return fmt.Errorf("error setting types: %s", err) + } + if err := d.Set("zone", zone); err != nil { + return fmt.Errorf("error setting zone: %s", err) + } + if err := d.Set("project", project); err != nil { + return fmt.Errorf("error setting project: %s", err) + } + d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) + + return nil +} + +func flattenTpuV2AcceleratorTypes(resp map[string]interface{}) []interface{} { + typeObjList := resp["acceleratorTypes"].([]interface{}) + types := make([]interface{}, len(typeObjList)) + for i, typ := range typeObjList { + typeObj := typ.(map[string]interface{}) + types[i] = typeObj["type"] + } + return types +} +{{- end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types_test.go.tmpl b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types_test.go.tmpl new file mode 100644 index 000000000000..48a3b43f8e82 --- /dev/null +++ 
b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_accelerator_types_test.go.tmpl @@ -0,0 +1,72 @@ +package tpuv2_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "errors" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccTpuV2AcceleratorTypes_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccTpuV2AcceleratorTypesConfig, + Check: resource.ComposeTestCheckFunc( + testAccCheckTpuV2AcceleratorTypes("data.google_tpu_v2_accelerator_types.available"), + ), + }, + }, + }) +} + +func testAccCheckTpuV2AcceleratorTypes(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("can't find TPU v2 accelerator types data source: %s", n) + } + + if rs.Primary.ID == "" { + return errors.New("data source id not set") + } + + count, ok := rs.Primary.Attributes["types.#"] + if !ok { + return errors.New("can't find 'types' attribute") + } + + cnt, err := strconv.Atoi(count) + if err != nil { + return errors.New("failed to read number of types") + } + if cnt < 2 { + return fmt.Errorf("expected at least 2 types, received %d, this is most likely a bug", cnt) + } + + for i := 0; i < cnt; i++ { + idx := fmt.Sprintf("types.%d", i) + _, ok := rs.Primary.Attributes[idx] + if !ok { + return fmt.Errorf("expected %q, type not found", idx) + } + } + return nil + } +} + +const testAccTpuV2AcceleratorTypesConfig = ` +data "google_tpu_v2_accelerator_types" "available" { + provider = google-beta +} +` +{{- end }} \ No newline at end of file diff --git 
a/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions.go.tmpl b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions.go.tmpl new file mode 100644 index 000000000000..2a365a0f00d2 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions.go.tmpl @@ -0,0 +1,95 @@ +package tpuv2 + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "fmt" + "log" + "sort" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func DataSourceTpuV2RuntimeVersions() *schema.Resource { + return &schema.Resource{ + Read: dataSourceTpuV2RuntimeVersionsRead, + Schema: map[string]*schema.Schema{ + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "zone": { + Type: schema.TypeString, + Optional: true, + Computed: true, + }, + "versions": { + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + }, + } +} + +func dataSourceTpuV2RuntimeVersionsRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*transport_tpg.Config) + userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent) + if err != nil { + return err + } + + project, err := tpgresource.GetProject(d, config) + if err != nil { + return err + } + + zone, err := tpgresource.GetZone(d, config) + if err != nil { + return err + } + + url, err := tpgresource.ReplaceVars(d, config, "{{"{{"}}TpuV2BasePath{{"}}"}}projects/{{"{{"}}project{{"}}"}}/locations/{{"{{"}}zone{{"}}"}}/runtimeVersions") + if err != nil { + return err + } + + versionsRaw, err := tpgresource.PaginatedListRequest(project, url, userAgent, config, flattenTpuV2RuntimeVersions) + if err != nil { + return fmt.Errorf("error listing TPU v2 runtime versions: %s", err) + } + + versions := make([]string, 
len(versionsRaw)) + for i, ver := range versionsRaw { + versions[i] = ver.(string) + } + sort.Strings(versions) + + log.Printf("[DEBUG] Received Google TPU v2 runtime versions: %q", versions) + + if err := d.Set("versions", versions); err != nil { + return fmt.Errorf("error setting versions: %s", err) + } + if err := d.Set("zone", zone); err != nil { + return fmt.Errorf("error setting zone: %s", err) + } + if err := d.Set("project", project); err != nil { + return fmt.Errorf("error setting project: %s", err) + } + d.SetId(fmt.Sprintf("projects/%s/zones/%s", project, zone)) + + return nil +} + +func flattenTpuV2RuntimeVersions(resp map[string]interface{}) []interface{} { + verObjList := resp["runtimeVersions"].([]interface{}) + versions := make([]interface{}, len(verObjList)) + for i, v := range verObjList { + verObj := v.(map[string]interface{}) + versions[i] = verObj["version"] + } + return versions +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions_test.go.tmpl b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions_test.go.tmpl new file mode 100644 index 000000000000..cf73db89e4c4 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpuv2/go/data_source_tpu_v2_runtime_versions_test.go.tmpl @@ -0,0 +1,72 @@ +package tpuv2_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "errors" + "fmt" + "strconv" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccTpuV2RuntimeVersions_basic(t *testing.T) { + t.Parallel() + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccTpuV2RuntimeVersionsConfig, + Check: resource.ComposeTestCheckFunc( + 
testAccCheckTpuV2RuntimeVersions("data.google_tpu_v2_runtime_versions.available"), + ), + }, + }, + }) +} + +func testAccCheckTpuV2RuntimeVersions(n string) resource.TestCheckFunc { + return func(s *terraform.State) error { + rs, ok := s.RootModule().Resources[n] + if !ok { + return fmt.Errorf("can't find TPU v2 runtime versions data source: %s", n) + } + + if rs.Primary.ID == "" { + return errors.New("data source id not set") + } + + count, ok := rs.Primary.Attributes["versions.#"] + if !ok { + return errors.New("can't find 'versions' attribute") + } + + cnt, err := strconv.Atoi(count) + if err != nil { + return errors.New("failed to read number of versions") + } + if cnt < 2 { + return fmt.Errorf("expected at least 2 versions, received %d, this is most likely a bug", cnt) + } + + for i := 0; i < cnt; i++ { + idx := fmt.Sprintf("versions.%d", i) + _, ok := rs.Primary.Attributes[idx] + if !ok { + return fmt.Errorf("expected %q, version not found", idx) + } + } + return nil + } +} + +const testAccTpuV2RuntimeVersionsConfig = ` +data "google_tpu_v2_runtime_versions" "available" { + provider = google-beta +} +` +{{- end }} diff --git a/mmv1/third_party/terraform/services/tpuv2/go/resource_tpu_v2_vm_test.go.tmpl b/mmv1/third_party/terraform/services/tpuv2/go/resource_tpu_v2_vm_test.go.tmpl new file mode 100644 index 000000000000..db3405ed03c0 --- /dev/null +++ b/mmv1/third_party/terraform/services/tpuv2/go/resource_tpu_v2_vm_test.go.tmpl @@ -0,0 +1,168 @@ +package tpuv2_test + +{{ if ne $.TargetVersionName `ga` -}} +import ( + "testing" + "strconv" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccTpuV2Vm_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckTpuV2VmDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccTpuV2Vm_full(context), + }, + { + ResourceName: "google_tpu_v2_vm.tpu", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels", "zone"}, + }, + { + Config: testAccTpuV2Vm_update(context, true), + }, + { + ResourceName: "google_tpu_v2_vm.tpu", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels", "zone"}, + }, + { + Config: testAccTpuV2Vm_update(context, false), + }, + { + ResourceName: "google_tpu_v2_vm.tpu", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"labels", "terraform_labels", "zone"}, + }, + }, + }) +} + +func testAccTpuV2Vm_full(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_tpu_v2_vm" "tpu" { + provider = google-beta + + name = "tf-test-tpu-%{random_suffix}" + zone = "us-central1-c" + description = "Text description of the TPU." 
+ + runtime_version = "tpu-vm-tf-2.13.0" + accelerator_type = "v2-8" + + scheduling_config { + preemptible = true + } + + data_disks { + source_disk = google_compute_disk.disk.id + mode = "READ_ONLY" + } + + labels = { + foo = "bar" + } + + metadata = { + foo = "bar" + } + + tags = ["foo"] + + lifecycle { + prevent_destroy = true + } +} + +resource "google_compute_disk" "disk" { + provider = google-beta + + name = "tf-test-tpu-disk-%{random_suffix}" + image = "debian-cloud/debian-11" + size = 10 + type = "pd-ssd" + zone = "us-central1-c" +} +`, context) +} + +func testAccTpuV2Vm_update(context map[string]interface{}, preventDestroy bool) string { + context["prevent_destroy"] = strconv.FormatBool(preventDestroy) + + return acctest.Nprintf(` +resource "google_tpu_v2_vm" "tpu" { + provider = google-beta + + name = "tf-test-tpu-%{random_suffix}" + zone = "us-central1-c" + description = "Text description of the TPU updated." + + runtime_version = "tpu-vm-tf-2.13.0" + accelerator_type = "v2-8" + + scheduling_config { + preemptible = true + } + + data_disks { + source_disk = google_compute_disk.disk.id + mode = "READ_WRITE" + } + + data_disks { + source_disk = google_compute_disk.disk2.id + mode = "READ_ONLY" + } + + labels = { + baz = "bar" + } + + metadata = { + baz = "bar" + } + + tags = ["baz"] + + lifecycle { + prevent_destroy = %{prevent_destroy} + } +} + +resource "google_compute_disk" "disk" { + provider = google-beta + + name = "tf-test-tpu-disk-%{random_suffix}" + image = "debian-cloud/debian-11" + size = 10 + type = "pd-ssd" + zone = "us-central1-c" +} + +resource "google_compute_disk" "disk2" { + provider = google-beta + + name = "tf-test-tpu-disk2-%{random_suffix}" + image = "debian-cloud/debian-11" + size = 10 + type = "pd-ssd" + zone = "us-central1-c" +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/vertexai/go/iam_vertex_endpoint_test.go.tmpl b/mmv1/third_party/terraform/services/vertexai/go/iam_vertex_endpoint_test.go.tmpl 
new file mode 100644 index 000000000000..f4c9dfb56400 --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/go/iam_vertex_endpoint_test.go.tmpl @@ -0,0 +1,363 @@ +package vertexai_test + +{{ if ne $.TargetVersionName `ga` -}} + +import ( + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/envvar" +) + +func TestAccVertexAIEndpointIamBinding(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointIamBinding_basic(context), + }, + { + ResourceName: "google_vertex_ai_endpoint_iam_binding.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/endpoints/%s roles/viewer", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-endpoint-name%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + { + // Test Iam Binding update + Config: testAccVertexAIEndpointIamBinding_update(context), + }, + { + ResourceName: "google_vertex_ai_endpoint_iam_binding.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/endpoints/%s roles/viewer", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-endpoint-name%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccVertexAIEndpointIamMember(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + // Test Iam Member creation (no update for member, no need to test) + Config: testAccVertexAIEndpointIamMember_basic(context), + }, + { + ResourceName: "google_vertex_ai_endpoint_iam_member.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/endpoints/%s roles/viewer user:admin@hashicorptest.com", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-endpoint-name%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAccVertexAIEndpointIamPolicy(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + "role": "roles/viewer", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIEndpointIamPolicy_basic(context), + Check: resource.TestCheckResourceAttrSet("data.google_vertex_ai_endpoint_iam_policy.foo", "policy_data"), + }, + { + ResourceName: "google_vertex_ai_endpoint_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/endpoints/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-endpoint-name%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + { + Config: testAccVertexAIEndpointIamPolicy_emptyBinding(context), + }, + { + ResourceName: "google_vertex_ai_endpoint_iam_policy.foo", + ImportStateId: fmt.Sprintf("projects/%s/locations/%s/endpoints/%s", envvar.GetTestProjectFromEnv(), envvar.GetTestRegionFromEnv(), fmt.Sprintf("tf-test-endpoint-name%s", context["random_suffix"])), + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func testAccVertexAIEndpointIamMember_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource 
"google_vertex_ai_endpoint" "endpoint" { + name = "tf-test-endpoint-name%{random_suffix}" + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + location = "us-central1" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.vertex_network.id +} + +resource "google_compute_network" "vertex_network" { + name = "tf-test-network-name%{random_suffix}" +} + +data "google_project" "project" {} + +resource "google_vertex_ai_endpoint_iam_member" "foo" { +project = google_vertex_ai_endpoint.endpoint.project +location = google_vertex_ai_endpoint.endpoint.location +endpoint = google_vertex_ai_endpoint.endpoint.name + role = "%{role}" + member = "user:admin@hashicorptest.com" +} +`, context) +} + +func testAccVertexAIEndpointIamPolicy_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint" "endpoint" { + name = "tf-test-endpoint-name%{random_suffix}" + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + location = "us-central1" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.vertex_network.name}" + depends_on = [ + 
google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.vertex_network.id +} + +resource "google_compute_network" "vertex_network" { + name = "tf-test-network-name%{random_suffix}" +} + +data "google_project" "project" {} + +data "google_iam_policy" "foo" { + binding { + role = "%{role}" + members = ["user:admin@hashicorptest.com"] + } +} + +resource "google_vertex_ai_endpoint_iam_policy" "foo" { +project = google_vertex_ai_endpoint.endpoint.project +location = google_vertex_ai_endpoint.endpoint.location +endpoint = google_vertex_ai_endpoint.endpoint.name + policy_data = data.google_iam_policy.foo.policy_data +} + +data "google_vertex_ai_endpoint_iam_policy" "foo" { +project = google_vertex_ai_endpoint.endpoint.project +location = google_vertex_ai_endpoint.endpoint.location +endpoint = google_vertex_ai_endpoint.endpoint.name + depends_on = [ + google_vertex_ai_endpoint_iam_policy.foo + ] +} +`, context) +} + +func testAccVertexAIEndpointIamPolicy_emptyBinding(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint" "endpoint" { + name = "tf-test-endpoint-name%{random_suffix}" + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + location = "us-central1" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.vertex_network.name}" + depends_on = [ + 
google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.vertex_network.id +} + +resource "google_compute_network" "vertex_network" { + name = "tf-test-network-name%{random_suffix}" +} + +data "google_project" "project" {} + +data "google_iam_policy" "foo" { +} + +resource "google_vertex_ai_endpoint_iam_policy" "foo" { +project = google_vertex_ai_endpoint.endpoint.project +location = google_vertex_ai_endpoint.endpoint.location +endpoint = google_vertex_ai_endpoint.endpoint.name + policy_data = data.google_iam_policy.foo.policy_data +} +`, context) +} + +func testAccVertexAIEndpointIamBinding_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint" "endpoint" { + name = "tf-test-endpoint-name%{random_suffix}" + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + location = "us-central1" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = 
"tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.vertex_network.id +} + +resource "google_compute_network" "vertex_network" { + name = "tf-test-network-name%{random_suffix}" +} + +data "google_project" "project" {} + +resource "google_vertex_ai_endpoint_iam_binding" "foo" { + project = google_vertex_ai_endpoint.endpoint.project + location = google_vertex_ai_endpoint.endpoint.location + endpoint = google_vertex_ai_endpoint.endpoint.name + role = "%{role}" + members = ["user:admin@hashicorptest.com"] +} +`, context) +} + +func testAccVertexAIEndpointIamBinding_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_vertex_ai_endpoint" "endpoint" { + name = "tf-test-endpoint-name%{random_suffix}" + display_name = "sample-endpoint" + description = "A sample vertex endpoint" + location = "us-central1" + region = "us-central1" + labels = { + label-one = "value-one" + } + network = "projects/${data.google_project.project.number}/global/networks/${google_compute_network.vertex_network.name}" + depends_on = [ + google_service_networking_connection.vertex_vpc_connection + ] +} + +resource "google_service_networking_connection" "vertex_vpc_connection" { + network = google_compute_network.vertex_network.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.vertex_range.name] +} + +resource "google_compute_global_address" "vertex_range" { + name = "tf-test-address-name%{random_suffix}" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 24 + network = google_compute_network.vertex_network.id +} + +resource "google_compute_network" "vertex_network" { + name = "tf-test-network-name%{random_suffix}" +} + +data "google_project" "project" {} + +resource "google_vertex_ai_endpoint_iam_binding" "foo" { +project = google_vertex_ai_endpoint.endpoint.project +location = 
google_vertex_ai_endpoint.endpoint.location +endpoint = google_vertex_ai_endpoint.endpoint.name + role = "%{role}" + members = ["user:admin@hashicorptest.com", "user:gterraformtest1@gmail.com"] +} +`, context) +} + +{{ end }} diff --git a/mmv1/third_party/terraform/services/vertexai/go/resource_vertex_ai_metadata_store_test.go.tmpl b/mmv1/third_party/terraform/services/vertexai/go/resource_vertex_ai_metadata_store_test.go.tmpl new file mode 100644 index 000000000000..a1109bbbe47c --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/go/resource_vertex_ai_metadata_store_test.go.tmpl @@ -0,0 +1,93 @@ +package vertexai_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "fmt" + "github.com/hashicorp/terraform-provider-google/google/acctest" + "testing" + "strings" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccVertexAIMetadataStore_vertexAiMetadataStoreExample(t *testing.T) { + t.Parallel() + + kms := acctest.BootstrapKMSKeyInLocation(t, "us-central1") + name := fmt.Sprintf("tf-test-%s", acctest.RandString(t, 10)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckVertexAIMetadataStoreDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIMetadataStore_vertexAiMetadataStoreExample(name, kms.CryptoKey.Name), + }, + { + ResourceName: "google_vertex_ai_metadata_store.store", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"region"}, + }, + }, + }) +} + +func testAccVertexAIMetadataStore_vertexAiMetadataStoreExample(name, kmsKey string) string { + return fmt.Sprintf(` +resource "google_vertex_ai_metadata_store" "store" 
{ + name = "%s" + description = "Magic" + region = "us-central1" + encryption_spec { + kms_key_name = "%s" + } +} +`, name, kmsKey) +} + +func testAccCheckVertexAIMetadataStoreDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_vertex_ai_metadata_store" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := acctest.GoogleProviderConfig(t) + + url, err := tpgresource.ReplaceVarsForTest(config, rs, "{{"{{"}}VertexAIBasePath{{"}}"}}{{"{{"}}name{{"}}"}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: billingProject, + RawURL: url, + UserAgent: config.UserAgent, + }) + if err == nil { + return fmt.Errorf("VertexAIMetadataStore still exists at %s", url) + } + } + + return nil + } +} + + +{{ end }} \ No newline at end of file diff --git a/mmv1/third_party/terraform/services/vertexai/go/vertex_ai_operation.go.tmpl b/mmv1/third_party/terraform/services/vertexai/go/vertex_ai_operation.go.tmpl new file mode 100644 index 000000000000..30f0f3f30d73 --- /dev/null +++ b/mmv1/third_party/terraform/services/vertexai/go/vertex_ai_operation.go.tmpl @@ -0,0 +1,77 @@ +package vertexai + +import ( + "encoding/json" + "fmt" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +type VertexAIOperationWaiter struct { + Config *transport_tpg.Config + UserAgent string + Project string + tpgresource.CommonOperationWaiter +} + +func (w *VertexAIOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + + region := 
tpgresource.GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) + + // Returns the proper get. +{{- if eq $.TargetVersionName "ga" }} + url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) +{{- else }} + url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1beta1/%s", region, w.CommonOperationWaiter.Op.Name) +{{- end }} + + return transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: w.Config, + Method: "GET", + Project: w.Project, + RawURL: url, + UserAgent: w.UserAgent, + }) +} + +func createVertexAIWaiter(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string) (*VertexAIOperationWaiter, error) { + w := &VertexAIOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func VertexAIOperationWaitTimeWithResponse(config *transport_tpg.Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := tpgresource.OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func VertexAIOperationWaitTime(config *transport_tpg.Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. 
+ return err + } + return tpgresource.OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_feature_group_test.go b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_feature_group_test.go index a31bb11f5bee..22db16aef282 100644 --- a/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_feature_group_test.go +++ b/mmv1/third_party/terraform/services/vertexai/resource_vertex_ai_feature_group_test.go @@ -78,6 +78,12 @@ resource "google_bigquery_table" "sample_table" { "type": "STRING", "mode": "NULLABLE" }, + { + "name": "test_entity_column", + "type": "STRING", + "mode": "NULLABLE", + "description": "test secondary entity column" + }, { "name": "feature_timestamp", "type": "TIMESTAMP", @@ -103,7 +109,7 @@ func testAccVertexAIFeatureGroup_updated(context map[string]interface{}) string big_query_source { input_uri = "bq://${google_bigquery_table.sample_table.project}.${google_bigquery_table.sample_table.dataset_id}.${google_bigquery_table.sample_table.table_id}" } - entity_id_columns = ["feature_id"] + entity_id_columns = ["feature_id","test_entity_column"] } } @@ -126,6 +132,12 @@ resource "google_bigquery_table" "sample_table" { "type": "STRING", "mode": "NULLABLE" }, + { + "name": "test_entity_column", + "type": "STRING", + "mode": "NULLABLE", + "description": "test secondary entity column" + }, { "name": "feature_timestamp", "type": "TIMESTAMP", diff --git a/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_network_sweeper.go b/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_network_sweeper.go new file mode 100644 index 000000000000..8c5140376384 --- /dev/null +++ b/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_network_sweeper.go @@ -0,0 +1,132 @@ +package vmwareengine + +import ( + "context" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/envvar" + 
"github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("VmwareengineNetwork", testSweepVmwareengineNetwork) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepVmwareengineNetwork(region string) error { + resourceName := "VmwareengineNetwork" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // List of location values includes: + // * global location + // * regions used for this resource type's acc tests in the past + // * the 'region' passed to the sweeper + locations := []string{region, "global", "southamerica-west1", "me-west1"} + log.Printf("[INFO][SWEEPER_LOG] Sweeping will include these locations: %v.", locations) + for _, location := range locations { + log.Printf("[INFO][SWEEPER_LOG] Beginning the process of sweeping location '%s'.", location) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": location, + "location": location, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://vmwareengine.googleapis.com/v1/projects/{{project}}/locations/{{location}}/vmwareEngineNetworks", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list 
url: %s", err) + continue + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + continue + } + + resourceList, ok := res["vmwareEngineNetworks"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + continue + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. + nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + continue + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://vmwareengine.googleapis.com/v1/projects/{{project}}/locations/{{location}}/vmwareEngineNetworks/{{name}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + continue + } + deleteUrl = deleteUrl + name + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and 
skipped.", nonPrefixCount) + } + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_private_cloud_sweeper.go b/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_private_cloud_sweeper.go new file mode 100644 index 000000000000..6df3f302205a --- /dev/null +++ b/mmv1/third_party/terraform/services/vmwareengine/resource_vmwareengine_private_cloud_sweeper.go @@ -0,0 +1,137 @@ +package vmwareengine + +import ( + "context" + "fmt" + "log" + "strings" + "testing" + + "github.com/hashicorp/terraform-provider-google/google/envvar" + "github.com/hashicorp/terraform-provider-google/google/sweeper" + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func init() { + sweeper.AddTestSweepers("VmwareenginePrivateCloud", testSweepVmwareenginePrivateCloud) +} + +// At the time of writing, the CI only passes us-central1 as the region +func testSweepVmwareenginePrivateCloud(region string) error { + resourceName := "VmwareenginePrivateCloud" + log.Printf("[INFO][SWEEPER_LOG] Starting sweeper for %s", resourceName) + + config, err := sweeper.SharedConfigForRegion(region) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error getting shared config for region: %s", err) + return err + } + + err = config.LoadAndValidate(context.Background()) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error loading: %s", err) + return err + } + + t := &testing.T{} + billingId := envvar.GetTestBillingAccountFromEnv(t) + + // List of location values includes: + // * zones used for this resource type's acc tests in the past + // * the 'region' passed to the sweeper + locations := []string{region, "southamerica-west1-a", "me-west1-a"} + log.Printf("[INFO][SWEEPER_LOG] Sweeping will include these locations: %v.", locations) + for _, location := range locations { + log.Printf("[INFO][SWEEPER_LOG] Beginning the process of 
sweeping location '%s'.", location) + + // Setup variables to replace in list template + d := &tpgresource.ResourceDataMock{ + FieldsInSchema: map[string]interface{}{ + "project": config.Project, + "region": location, + "location": location, + "zone": "-", + "billing_account": billingId, + }, + } + + listTemplate := strings.Split("https://vmwareengine.googleapis.com/v1/projects/{{project}}/locations/{{location}}/privateClouds", "?")[0] + listUrl, err := tpgresource.ReplaceVars(d, config, listTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing sweeper list url: %s", err) + continue + } + + res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "GET", + Project: config.Project, + RawURL: listUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error in response from request %s: %s", listUrl, err) + continue + } + + resourceList, ok := res["privateClouds"] + if !ok { + log.Printf("[INFO][SWEEPER_LOG] Nothing found in response.") + continue + } + + rl := resourceList.([]interface{}) + + log.Printf("[INFO][SWEEPER_LOG] Found %d items in %s list response.", len(rl), resourceName) + // Keep count of items that aren't sweepable for logging. 
+ nonPrefixCount := 0 + for _, ri := range rl { + obj := ri.(map[string]interface{}) + if obj["name"] == nil { + log.Printf("[INFO][SWEEPER_LOG] %s resource name was nil", resourceName) + continue + } + + name := tpgresource.GetResourceNameFromSelfLink(obj["name"].(string)) + // Skip resources that shouldn't be sweeped + if !sweeper.IsSweepableTestResource(name) { + nonPrefixCount++ + continue + } + + deleteTemplate := "https://vmwareengine.googleapis.com/v1/projects/{{project}}/locations/{{location}}/privateClouds/{{name}}" + deleteUrl, err := tpgresource.ReplaceVars(d, config, deleteTemplate) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] error preparing delete url: %s", err) + continue + } + deleteUrl = deleteUrl + name + + // We force delete the Private Cloud and ensure there's no delay in deletion + force := true + delayHours := 0 + deleteUrl = deleteUrl + fmt.Sprintf("?force=%t&delayHours=%d", force, delayHours) + + // Don't wait on operations as we may have a lot to delete + _, err = transport_tpg.SendRequest(transport_tpg.SendRequestOptions{ + Config: config, + Method: "DELETE", + Project: config.Project, + RawURL: deleteUrl, + UserAgent: config.UserAgent, + }) + if err != nil { + log.Printf("[INFO][SWEEPER_LOG] Error deleting for url %s : %s", deleteUrl, err) + } else { + log.Printf("[INFO][SWEEPER_LOG] Sent delete request for %s resource: %s", resourceName, name) + } + } + + if nonPrefixCount > 0 { + log.Printf("[INFO][SWEEPER_LOG] %d items were non-sweepable and skipped.", nonPrefixCount) + } + } + + return nil +} diff --git a/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_shielded_config_test.go b/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_shielded_config_test.go new file mode 100644 index 000000000000..c3e3eb7a30c9 --- /dev/null +++ b/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_shielded_config_test.go @@ -0,0 +1,228 @@ +package workbench_test + +import 
( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccWorkbenchInstance_shielded_config_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_remove(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", 
"state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_shielded_config_double_apply(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_none(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", 
"request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_false(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_shielded_config_true(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_shielded_config_true(context map[string]interface{}) string { 
+ return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + shielded_instance_config { + enable_secure_boot = true + enable_vtpm = true + enable_integrity_monitoring = true + } + } +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_false(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = false + enable_integrity_monitoring = false + } + } + +} +`, context) +} + +func testAccWorkbenchInstance_shielded_config_none(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_test.go b/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_test.go new file mode 100644 index 000000000000..a931960089f2 --- /dev/null +++ b/mmv1/third_party/terraform/services/workbench/go/resource_workbench_instance_test.go @@ -0,0 +1,626 @@ +package workbench_test + +import ( + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" +) + +func TestAccWorkbenchInstance_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: 
resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_update(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" +} +`, context) +} + +func testAccWorkbenchInstance_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + machine_type = "n1-standard-16" + + accelerator_configs{ + type = "NVIDIA_TESLA_T4" + core_count = 1 + } + + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = true + enable_integrity_monitoring = false + } + + boot_disk { + disk_size_gb = 310 + } + + data_disks { + disk_size_gb = 330 + } + + metadata = { + terraform = "true" + } + + } + + labels = { + k = "val" + } + +} +`, context) +} + +func TestAccWorkbenchInstance_updateGpu(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basicGpu(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_updateGpu(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_basicGpu(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + gce_setup { + machine_type = "n1-standard-1" // cant be e2 because of accelerator + accelerator_configs { + type = "NVIDIA_TESLA_T4" + core_count = 1 + } + + } +} +`, context) +} + +func testAccWorkbenchInstance_updateGpu(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + machine_type = "n1-standard-16" + + accelerator_configs{ + type = "NVIDIA_TESLA_P4" + core_count = 1 + } + + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = true + enable_integrity_monitoring = false + } + + } + + labels = { + k = "val" + } + +} +`, 
context) +} + +func TestAccWorkbenchInstance_removeGpu(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_Gpu(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_updateGpu(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_Gpu(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + gce_setup { + machine_type = "n1-standard-1" // cant be e2 because of accelerator + accelerator_configs { + type = "NVIDIA_TESLA_T4" + core_count = 1 + } + + } +} +`, context) +} + +func testAccWorkbenchInstance_removeGpu(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + machine_type = "n1-standard-16" + + } + +} +`, 
context) +} + +func TestAccWorkbenchInstance_updateMetadata(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_updateMetadata(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_updateMetadata(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + metadata = { + terraform = "true" + } + } + + labels = { + k = "val" + } + +} +`, context) +} + +func TestAccWorkbenchInstance_updateState(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { 
+ Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + { + Config: testAccWorkbenchInstance_updateState(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "STOPPED"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_updateState(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + desired_state = "STOPPED" + +} +`, context) +} + +func TestAccWorkbenchInstance_empty_accelerator(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_empty_accelerator(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkbenchInstance_empty_accelerator(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_empty_accelerator(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + + gce_setup { + accelerator_configs{ + } + } +} +`, context) +} + +func TestAccWorkbenchInstance_updateBootDisk(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { 
acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + { + Config: testAccWorkbenchInstance_updateBootDisk(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_updateDataDisk(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + { + Config: testAccWorkbenchInstance_updateDataDisk(context), + Check: resource.ComposeTestCheckFunc( + 
resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + }, + }) +} + +func TestAccWorkbenchInstance_updateBothDisks(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkbenchInstance_basic(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + { + Config: testAccWorkbenchInstance_updateBothDisks(context), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr( + "google_workbench_instance.instance", "state", "ACTIVE"), + ), + }, + { + ResourceName: "google_workbench_instance.instance", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"name", "instance_owners", "location", "instance_id", "request_id", "labels", "terraform_labels","desired_state"}, + }, + }, + }) +} + +func testAccWorkbenchInstance_updateBootDisk(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + gce_setup { + boot_disk { + disk_size_gb = 310 + } + } +} +`, 
context) +} + +func testAccWorkbenchInstance_updateDataDisk(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + gce_setup { + data_disks { + disk_size_gb = 330 + } + } +} +`, context) +} + +func testAccWorkbenchInstance_updateBothDisks(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_workbench_instance" "instance" { + name = "tf-test-workbench-instance%{random_suffix}" + location = "us-central1-a" + gce_setup { + boot_disk { + disk_size_gb = 310 + } + + data_disks { + disk_size_gb = 330 + } + } +} +`, context) +} diff --git a/mmv1/third_party/terraform/services/workflows/go/resource_workflows_workflow_test.go b/mmv1/third_party/terraform/services/workflows/go/resource_workflows_workflow_test.go new file mode 100644 index 000000000000..521c81231a90 --- /dev/null +++ b/mmv1/third_party/terraform/services/workflows/go/resource_workflows_workflow_test.go @@ -0,0 +1,222 @@ +package workflows_test + +import ( + "context" + "fmt" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + + "github.com/hashicorp/terraform-provider-google/google/acctest" + "github.com/hashicorp/terraform-provider-google/google/services/workflows" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func TestAccWorkflowsWorkflow_Update(t *testing.T) { + // Custom test written to test diffs + t.Parallel() + + workflowName := fmt.Sprintf("tf-test-acc-workflow-%d", acctest.RandInt(t)) + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckWorkflowsWorkflowDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkflowsWorkflow_Update(workflowName), + }, + { + Config: 
testAccWorkflowsWorkflow_Updated(workflowName), + }, + }, + }) +} + +func testAccWorkflowsWorkflow_Update(name string) string { + return fmt.Sprintf(` +resource "google_workflows_workflow" "example" { + name = "%s" + region = "us-central1" + description = "Magic" + call_log_level = "LOG_ERRORS_ONLY" + user_env_vars = { + url = "https://timeapi.io/api/Time/current/zone?timeZone=Europe/Amsterdam" + } + source_contents = <<-EOF + # This is a sample workflow, feel free to replace it with your source code + # + # This workflow does the following: + # - reads current time and date information from an external API and stores + # the response in CurrentDateTime variable + # - retrieves a list of Wikipedia articles related to the day of the week + # from CurrentDateTime + # - returns the list of articles as an output of the workflow + # FYI, In terraform you need to escape the $$ or it will cause errors. + + - getCurrentTime: + call: http.get + args: + url: $${sys.get_env("url")} + result: CurrentDateTime + - readWikipedia: + call: http.get + args: + url: https://en.wikipedia.org/w/api.php + query: + action: opensearch + search: $${CurrentDateTime.body.dayOfTheWeek} + result: WikiResult + - returnOutput: + return: $${WikiResult.body[1]} +EOF +} +`, name) +} + +func testAccWorkflowsWorkflow_Updated(name string) string { + return fmt.Sprintf(` +resource "google_workflows_workflow" "example" { + name = "%s" + region = "us-central1" + description = "Magic" + call_log_level = "LOG_ERRORS_ONLY" + user_env_vars = { + url = "https://timeapi.io/api/Time/current/zone?timeZone=Europe/Amsterdam" + } + source_contents = <<-EOF + # This is a sample workflow, feel free to replace it with your source code + # + # This workflow does the following: + # - reads current time and date information from an external API and stores + # the response in CurrentDateTime variable + # - retrieves a list of Wikipedia articles related to the day of the week + # from CurrentDateTime + # - returns the list 
of articles as an output of the workflow + # FYI, In terraform you need to escape the $$ or it will cause errors. + + - getCurrentTime: + call: http.get + args: + url: $${sys.get_env("url")} + result: CurrentDateTime + - readWikipedia: + call: http.get + args: + url: https:/fi.wikipedia.org/w/api.php + query: + action: opensearch + search: $${CurrentDateTime.body.dayOfTheWeek} + result: WikiResult + - returnOutput: + return: $${WikiResult.body[1]} +EOF +} +`, name) +} + +func TestWorkflowsWorkflowStateUpgradeV0(t *testing.T) { + t.Parallel() + + cases := map[string]struct { + Attributes map[string]interface{} + Expected map[string]string + Meta interface{} + }{ + "shorten long name": { + Attributes: map[string]interface{}{ + "name": "projects/my-project/locations/us-central1/workflows/my-workflow", + }, + Expected: map[string]string{ + "name": "my-workflow", + }, + Meta: &transport_tpg.Config{}, + }, + "short name stays": { + Attributes: map[string]interface{}{ + "name": "my-workflow", + }, + Expected: map[string]string{ + "name": "my-workflow", + }, + Meta: &transport_tpg.Config{}, + }, + } + for tn, tc := range cases { + t.Run(tn, func(t *testing.T) { + actual, err := workflows.ResourceWorkflowsWorkflowUpgradeV0(context.Background(), tc.Attributes, tc.Meta) + + if err != nil { + t.Error(err) + } + + for _, expectedName := range tc.Expected { + if actual["name"] != expectedName { + t.Errorf("expected: name -> %#v\n got: name -> %#v\n in: %#v", + expectedName, actual["name"], actual) + } + } + }) + } +} + +func TestAccWorkflowsWorkflow_CMEK(t *testing.T) { + // Custom test written to test diffs + t.Parallel() + + workflowName := fmt.Sprintf("tf-test-acc-workflow-%d", acctest.RandInt(t)) + kms := acctest.BootstrapKMSKeyInLocation(t, "us-central1") + if acctest.BootstrapPSARole(t, "service-", "gcp-sa-workflows", "roles/cloudkms.cryptoKeyEncrypterDecrypter") { + t.Fatal("Stopping the test because a role was added to the policy.") + } + + acctest.VcrTest(t, 
resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckWorkflowsWorkflowDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkflowsWorkflow_CMEK(workflowName, kms.CryptoKey.Name), + }, + }, + }) +} + +func testAccWorkflowsWorkflow_CMEK(workflowName, kmsKeyName string) string { + return fmt.Sprintf(` +resource "google_workflows_workflow" "example" { + name = "%s" + region = "us-central1" + description = "Magic" + crypto_key_name = "%s" + source_contents = <<-EOF + # This is a sample workflow, feel free to replace it with your source code + # + # This workflow does the following: + # - reads current time and date information from an external API and stores + # the response in CurrentDateTime variable + # - retrieves a list of Wikipedia articles related to the day of the week + # from CurrentDateTime + # - returns the list of articles as an output of the workflow + # FYI, In terraform you need to escape the $$ or it will cause errors. 
+ + - getCurrentTime: + call: http.get + args: + url: https://us-central1-workflowsample.cloudfunctions.net/datetime + result: CurrentDateTime + - readWikipedia: + call: http.get + args: + url: https://fi.wikipedia.org/w/api.php + query: + action: opensearch + search: $${CurrentDateTime.body.dayOfTheWeek} + result: WikiResult + - returnOutput: + return: $${WikiResult.body[1]} +EOF +} +`, workflowName, kmsKeyName) +} + diff --git a/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_cluster_test.go.tmpl b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_cluster_test.go.tmpl new file mode 100644 index 000000000000..66ed1107491b --- /dev/null +++ b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_cluster_test.go.tmpl @@ -0,0 +1,151 @@ +package workstations_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccWorkstationsWorkstationCluster_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationCluster_workstationClusterBasicExample(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstationCluster_update(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, 
ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + }, + }) +} + +func TestAccWorkstationsWorkstationCluster_Private_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationClusterDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationCluster_workstationClusterPrivateExample(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstationCluster_private_update(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationCluster_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = 
"tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} +`, context) +} + +func testAccWorkstationsWorkstationCluster_private_update(context map[string]interface{}) string { + return acctest.Nprintf(` +data "google_project" "project" { + provider = google-beta +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + private_cluster_config { + allowed_projects = ["${data.google_project.project.project_id}"] + enable_private_endpoint = true + } + + labels = { + foo = "bar" + } +} + +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_config_test.go.tmpl b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_config_test.go.tmpl new file mode 100644 index 000000000000..576a21ca1262 --- /dev/null +++ b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_config_test.go.tmpl @@ -0,0 +1,1295 @@ +package workstations_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccWorkstationsWorkstationConfig_basic(t *testing.T) { + t.Parallel() + + context 
:= map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_basic(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + labels = { + foo = "bar" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_displayName(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + 
"display_name": "Display Name N", + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_displayName(context, ""), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_displayName(context, "2"), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_displayName(context map[string]interface{}, update string) string { + context["display_name"] = context["display_name"].(string) + update + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + 
workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + display_name = "%{display_name} %{random_suffix}" + + labels = { + foo = "bar" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_persistentDirectories(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_persistentDirectories(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_persistentDirectories(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = 
"tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + persistent_directories { + mount_path = "/home" + } + + labels = { + foo = "bar" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_ephemeralDirectories(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_ephemeralDirectories(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_ephemeralDirectories(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = 
"us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + ephemeral_directories { + mount_path = "/cache" + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot.id + read_only = true + } + } + + labels = { + foo = "bar" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_ephemeralDirectories_withSourceImage(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + 
provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_image" "test_source_image" { + provider = google-beta + name = "tf-test-workstation-source-image%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + storage_locations = ["us-central1"] +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + ephemeral_directories { + mount_path = "/cache" + gce_pd { + disk_type = "pd-standard" + source_image = google_compute_image.test_source_image.id + read_only = true + } + } + + labels = { + foo = "bar" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_serviceAccount(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_serviceAccount(context), + }, + { + 
ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_serviceAccount(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name + } + + resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + } + + resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" + } + + resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + enable_audit_agent = true + + host { + gce_instance { + service_account = google_service_account.default.email + service_account_scopes = ["https://www.googleapis.com/auth/cloud-platform"] + } + } + } +`, context) +} + +func TestAccWorkstationsWorkstationConfig_boost(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: 
acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_boost(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_boost(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name + } + + resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + } + + resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + host { + gce_instance { + boost_configs { + id = "boost-1" + machine_type = "n1-standard-2" + accelerators { + type = "nvidia-tesla-t4" + count = 1 + } + } + boost_configs { + id = "boost-2" + machine_type = "n1-standard-2" + pool_size = 2 + boot_disk_size_gb = 30 + enable_nested_virtualization = true + } + } + } + } +`, context) +} + +func TestAccWorkstationsWorkstationConfig_disableTcpConnections(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + 
"random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_disableTcpConnections(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_disableTcpConnections(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name + } + + resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + } + + resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" + } + + resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + disable_tcp_connections = true + + host { + gce_instance { + service_account = google_service_account.default.email 
+ service_account_scopes = ["https://www.googleapis.com/auth/cloud-platform"] + } + } + } +`, context) +} + + +func TestAccWorkstationsWorkstationConfig_readinessChecks(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_readinessChecks(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_readinessChecks(context map[string]interface{}) string { + return acctest.Nprintf(` + resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false + } + + resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name + } + + resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + } + + resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" + } + + resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + 
workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + readiness_checks { + path = "/" + port = 80 + } + + host { + gce_instance { + service_account = google_service_account.default.email + service_account_scopes = ["https://www.googleapis.com/auth/cloud-platform"] + } + } + } +`, context) +} + +func TestAccWorkstationsWorkstationConfig_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_workstationConfigBasicExample(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_update(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_workstationConfigBasicExample(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "annotations", "labels", "terraform_labels"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_update(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks 
= false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + host { + gce_instance { + machine_type = "n1-standard-4" + boot_disk_size_gb = 35 + disable_public_ip_addresses = true + enable_nested_virtualization = true + } + } + + labels = { + foo = "bar" + } + + lifecycle { + prevent_destroy = true + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_updateHostDetails(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_updateHostDetailsDefault(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_updateHostDetailsUpdated(context), + }, + { + ResourceName: 
"google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_updateHostDetailsUnsetInstanceConfigs(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_updateHostDetailsDefault(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + host { + gce_instance { + machine_type = "e2-standard-2" + boot_disk_size_gb = 35 + pool_size = 0 + + service_account = google_service_account.default.email + disable_public_ip_addresses = false + + shielded_instance_config { + enable_secure_boot = false + enable_vtpm = 
false + enable_integrity_monitoring = false + } + + confidential_instance_config { + enable_confidential_compute = false + } + } + } +} +`, context) +} + +func testAccWorkstationsWorkstationConfig_updateHostDetailsUpdated(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +# No longer explicitly used in google_workstations_workstation_config resource block below, but the +# service account needs to keep existing to allow the field to default from the API without error +resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + host { + gce_instance { + machine_type = "n2d-standard-2" + boot_disk_size_gb = 35 + pool_size = 1 + + disable_public_ip_addresses = true + tags = ["foo", "bar"] + + shielded_instance_config { + enable_secure_boot = true + enable_vtpm = true + enable_integrity_monitoring = true + } + + confidential_instance_config { + enable_confidential_compute = true + 
} + + boost_configs { + id = "boost-1" + machine_type = "n2d-standard-2" + } + } + } +} +`, context) +} + +func testAccWorkstationsWorkstationConfig_updateHostDetailsUnsetInstanceConfigs(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +# No longer explicitly used in google_workstations_workstation_config resource block below, but the +# service account needs to keep existing to allow the field to default from the API without error +resource "google_service_account" "default" { + provider = google-beta + + account_id = "tf-test-my-account%{random_suffix}" + display_name = "Service Account" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + host { + gce_instance { + machine_type = "n2d-standard-2" + boot_disk_size_gb = 35 + pool_size = 1 + + disable_public_ip_addresses = true + tags = ["foo", "bar"] + + shielded_instance_config {} + confidential_instance_config {} + } + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_updateWorkingDir(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + 
"random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_withCustomWorkingDir(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_unsetWorkingDir(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_withCustomWorkingDir(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + container { + image = 
"us-central1-docker.pkg.dev/cloud-workstations-images/predefined/code-oss:latest" + working_dir = "/test" + } +} +`, context) +} + +func testAccWorkstationsWorkstationConfig_unsetWorkingDir(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + container { + image = "us-central1-docker.pkg.dev/cloud-workstations-images/predefined/code-oss:latest" + } +} +`, context) +} + +func TestAccWorkstationsWorkstationConfig_updatePersistentDirectorySourceSnapshot(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderBetaFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationConfigDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstationConfig_withSourceDiskSnapshot(context), + }, + { + ResourceName: 
"google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + { + Config: testAccWorkstationsWorkstationConfig_withUpdatedSourceDiskSnapshot(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag"}, + }, + }, + }) +} + +func testAccWorkstationsWorkstationConfig_withSourceDiskSnapshot(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + 
persistent_directories { + mount_path = "/home" + + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot.id + reclaim_policy = "DELETE" + } + } +} +`, context) +} + +func testAccWorkstationsWorkstationConfig_withUpdatedSourceDiskSnapshot(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + provider = google-beta + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_compute_disk" "test_source_disk" { + provider = google-beta + name = "tf-test-workstation-source-disk%{random_suffix}" + size = 10 + type = "pd-ssd" + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot" { + provider = google-beta + name = "tf-test-workstation-source-snapshot%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_compute_snapshot" "test_source_snapshot2" { + provider = google-beta + name = "tf-test-workstation-source-snapshot2%{random_suffix}" + source_disk = google_compute_disk.test_source_disk.name + zone = "us-central1-a" +} + +resource "google_workstations_workstation_cluster" "default" { + provider = google-beta + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" +} + +resource "google_workstations_workstation_config" "default" { + provider = google-beta + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + 
persistent_directories { + mount_path = "/home" + + gce_pd { + source_snapshot = google_compute_snapshot.test_source_snapshot2.id + reclaim_policy = "RETAIN" + } + } +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_test.go.tmpl b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_test.go.tmpl new file mode 100644 index 000000000000..d92dcc634aad --- /dev/null +++ b/mmv1/third_party/terraform/services/workstations/go/resource_workstations_workstation_test.go.tmpl @@ -0,0 +1,150 @@ +package workstations_test +{{- if ne $.TargetVersionName "ga" }} + +import ( + "testing" + "github.com/hashicorp/terraform-provider-google/google/acctest" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" +) + +func TestAccWorkstationsWorkstation_update(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": acctest.RandString(t, 10), + } + + acctest.VcrTest(t, resource.TestCase{ + PreCheck: func() { acctest.AccTestPreCheck(t) }, + ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t), + CheckDestroy: testAccCheckWorkstationsWorkstationDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccWorkstationsWorkstation_basic(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + { + Config: testAccWorkstationsWorkstation_modified(context), + }, + { + ResourceName: "google_workstations_workstation_cluster.default", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "labels", "terraform_labels"}, + }, + }, + }) +} + + +func testAccWorkstationsWorkstation_basic(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + name = 
"tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation" "default" { + workstation_id = "tf-test-workstation%{random_suffix}" + workstation_config_id = google_workstations_workstation_config.default.workstation_config_id + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + labels = { + foo = "bar" + } + + env = { + name = "bar" + } +} +`, context) +} + +func testAccWorkstationsWorkstation_modified(context map[string]interface{}) string { + return acctest.Nprintf(` +resource "google_compute_network" "default" { + name = "tf-test-workstation-cluster%{random_suffix}" + auto_create_subnetworks = false +} + +resource "google_compute_subnetwork" "default" { + name = "tf-test-workstation-cluster%{random_suffix}" + ip_cidr_range = "10.0.0.0/24" + region = "us-central1" + network = google_compute_network.default.name +} + +resource "google_workstations_workstation_cluster" "default" { + workstation_cluster_id = "tf-test-workstation-cluster%{random_suffix}" + network = google_compute_network.default.id + subnetwork = 
google_compute_subnetwork.default.id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation_config" "default" { + workstation_config_id = "tf-test-workstation-config%{random_suffix}" + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + + labels = { + foo = "bar" + } +} + +resource "google_workstations_workstation" "default" { + workstation_id = "tf-test-workstation%{random_suffix}" + workstation_config_id = google_workstations_workstation_config.default.workstation_config_id + workstation_cluster_id = google_workstations_workstation_cluster.default.workstation_cluster_id + location = "us-central1" + display_name = "workstation%{random_suffix}" + + labels = { + foo = "bar" + } + + env = { + name = "test" + } +} +`, context) +} +{{- end }} diff --git a/mmv1/third_party/terraform/tpgiamresource/go/iam.go.tmpl b/mmv1/third_party/terraform/tpgiamresource/go/iam.go.tmpl new file mode 100644 index 000000000000..3222c6adfaee --- /dev/null +++ b/mmv1/third_party/terraform/tpgiamresource/go/iam.go.tmpl @@ -0,0 +1,567 @@ +// Utils for modifying IAM policies for resources across GCP +package tpgiamresource + +import ( + "encoding/json" + "fmt" + "log" + "reflect" + "sort" + "strings" + "time" + + "github.com/hashicorp/terraform-provider-google/google/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" + + "github.com/davecgh/go-spew/spew" + "github.com/hashicorp/errwrap" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudresourcemanager/v1" +) + +const maxBackoffSeconds = 30 +const IamPolicyVersion = 3 + +// These types are implemented per GCP resource type and specify how to do per-resource IAM operations. +// They are used in the generic Terraform IAM resource definitions +// (e.g. 
_member/_binding/_policy/_audit_config)
+type (
+	// The ResourceIamUpdater interface is implemented for each GCP resource supporting IAM policy.
+	// Implementations should be created per resource and should keep track of the resource identifier.
+	ResourceIamUpdater interface {
+		// Fetch the existing IAM policy attached to a resource.
+		GetResourceIamPolicy() (*cloudresourcemanager.Policy, error)
+
+		// Replaces the existing IAM Policy attached to a resource.
+		SetResourceIamPolicy(policy *cloudresourcemanager.Policy) error
+
+		// A mutex guards against concurrent calls to the SetResourceIamPolicy method.
+		// The mutex key should be made of the resource type and resource id.
+		// For example: `iam-project-{id}`.
+		GetMutexKey() string
+
+		// Returns the unique resource identifier.
+		GetResourceId() string
+
+		// Textual description of this resource to be used in error messages.
+		// The description should include the unique resource identifier.
+		DescribeResource() string
+	}
+
+	// Factory for generating ResourceIamUpdater for given ResourceData resource
+	NewResourceIamUpdaterFunc func(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (ResourceIamUpdater, error)
+
+	// Describes how to modify a policy for a given Terraform IAM (_policy/_member/_binding/_audit_config) resource
+	iamPolicyModifyFunc func(p *cloudresourcemanager.Policy) error
+
+	// Parser for Terraform resource identifier (d.Id) for resource whose IAM policy is being changed
+	ResourceIdParserFunc func(d *schema.ResourceData, config *transport_tpg.Config) error
+)
+
+// Locking wrapper around read-only operation with retries.
+func iamPolicyReadWithRetry(updater ResourceIamUpdater) (*cloudresourcemanager.Policy, error) { + mutexKey := updater.GetMutexKey() + transport_tpg.MutexStore.Lock(mutexKey) + defer transport_tpg.MutexStore.Unlock(mutexKey) + + log.Printf("[DEBUG] Retrieving policy for %s\n", updater.DescribeResource()) + var policy *cloudresourcemanager.Policy + err := transport_tpg.Retry(transport_tpg.RetryOptions{ + RetryFunc: func() (perr error) { + policy, perr = updater.GetResourceIamPolicy() + return perr + }, + Timeout: 10 * time.Minute, + }) + if err != nil { + return nil, err + } + log.Print(spew.Sprintf("[DEBUG] Retrieved policy for %s: %#v\n", updater.DescribeResource(), policy)) + return policy, nil +} + +// Locking wrapper around read-modify-write cycle for IAM policy. +func iamPolicyReadModifyWrite(updater ResourceIamUpdater, modify iamPolicyModifyFunc) error { + mutexKey := updater.GetMutexKey() + transport_tpg.MutexStore.Lock(mutexKey) + defer transport_tpg.MutexStore.Unlock(mutexKey) + + backoff := time.Second + for { + log.Printf("[DEBUG]: Retrieving policy for %s\n", updater.DescribeResource()) + p, err := updater.GetResourceIamPolicy() + if transport_tpg.IsGoogleApiErrorWithCode(err, 429) { + log.Printf("[DEBUG] 429 while attempting to read policy for %s, waiting %v before attempting again", updater.DescribeResource(), backoff) + time.Sleep(backoff) + continue + } else if err != nil { + return err + } + log.Printf("[DEBUG]: Retrieved policy for %s: %+v\n", updater.DescribeResource(), p) + + err = modify(p) + if err != nil { + return err + } + + log.Printf("[DEBUG]: Setting policy for %s to %+v\n", updater.DescribeResource(), p) + err = updater.SetResourceIamPolicy(p) + if err == nil { + fetchBackoff := 1 * time.Second + for successfulFetches := 0; successfulFetches < 3; { + if fetchBackoff > maxBackoffSeconds*time.Second { + return fmt.Errorf("Error applying IAM policy to %s: Waited too long for propagation.\n", updater.DescribeResource()) + } + 
time.Sleep(fetchBackoff) + log.Printf("[DEBUG]: Retrieving policy for %s\n", updater.DescribeResource()) + new_p, err := updater.GetResourceIamPolicy() + if err != nil { + // Quota for Read is pretty limited, so watch out for running out of quota. + if transport_tpg.IsGoogleApiErrorWithCode(err, 429) { + fetchBackoff = fetchBackoff * 2 + } else { + return err + } + } + log.Printf("[DEBUG]: Retrieved policy for %s: %+v\n", updater.DescribeResource(), p) + if new_p == nil { + // https://github.com/hashicorp/terraform-provider-google/issues/2625 + fetchBackoff = fetchBackoff * 2 + continue + } + modified_p := new_p + // This relies on the fact that `modify` is idempotent: since other changes might have + // happened between the call to set the policy and now, we just need to make sure that + // our change has been made. 'modify(p) == p' is our check for whether this has been + // correctly applied. + err = modify(modified_p) + if err != nil { + return err + } + if modified_p == new_p { + successfulFetches += 1 + } else { + fetchBackoff = fetchBackoff * 2 + } + } + break + } + if tpgresource.IsConflictError(err) { + log.Printf("[DEBUG]: Concurrent policy changes, restarting read-modify-write after %s\n", backoff) + time.Sleep(backoff) + backoff = backoff * 2 + if backoff > 30*time.Second { + return errwrap.Wrapf(fmt.Sprintf("Error applying IAM policy to %s: Too many conflicts. Latest error: {{"{{"}}err{{"}}"}}", updater.DescribeResource()), err) + } + continue + } + + // retry in the case that a service account is not found. This can happen when a service account is deleted + // out of band. 
+ if isServiceAccountNotFoundError, _ := transport_tpg.IamServiceAccountNotFound(err); isServiceAccountNotFoundError { + // calling a retryable function within a retry loop is not + // strictly the _best_ idea, but this error only happens in + // high-traffic projects anyways + currentPolicy, rerr := iamPolicyReadWithRetry(updater) + if rerr != nil { + if p.Etag != currentPolicy.Etag { + // not matching indicates that there is a new state to attempt to apply + log.Printf("current and old etag did not match for %s, retrying", updater.DescribeResource()) + time.Sleep(backoff) + backoff = backoff * 2 + continue + } + + log.Printf("current and old etag matched for %s, not retrying", updater.DescribeResource()) + } else { + // if the error is non-nil, just fall through and return the base error + log.Printf("[DEBUG]: error checking etag for policy %s. error: %v", updater.DescribeResource(), rerr) + } + } + + log.Printf("[DEBUG]: not retrying IAM policy for %s. error: %v", updater.DescribeResource(), err) + return errwrap.Wrapf(fmt.Sprintf("Error applying IAM policy for %s: {{"{{"}}err{{"}}"}}", updater.DescribeResource()), err) + } + log.Printf("[DEBUG]: Set policy for %s", updater.DescribeResource()) + return nil +} + +// Flattens a list of Bindings so each role+condition has a single Binding with combined members +func MergeBindings(bindings []*cloudresourcemanager.Binding) []*cloudresourcemanager.Binding { + bm := createIamBindingsMap(bindings) + return listFromIamBindingMap(bm) +} + +type conditionKey struct { + Description string + Expression string + Title string +} + +func conditionKeyFromCondition(condition *cloudresourcemanager.Expr) conditionKey { + if condition == nil { + return conditionKey{} + } + return conditionKey{condition.Description, condition.Expression, condition.Title} +} + +func (k conditionKey) Empty() bool { + return k == conditionKey{} +} + +func (k conditionKey) String() string { + return fmt.Sprintf("%s/%s/%s", k.Title, k.Description, 
k.Expression) +} + +type iamBindingKey struct { + Role string + Condition conditionKey +} + +// Removes a single role+condition binding from a list of Bindings +func filterBindingsWithRoleAndCondition(b []*cloudresourcemanager.Binding, role string, condition *cloudresourcemanager.Expr) []*cloudresourcemanager.Binding { + bMap := createIamBindingsMap(b) + key := iamBindingKey{role, conditionKeyFromCondition(condition)} + delete(bMap, key) + return listFromIamBindingMap(bMap) +} + +// Removes given role+condition/bound-member pairs from the given Bindings (i.e subtraction). +func subtractFromBindings(bindings []*cloudresourcemanager.Binding, toRemove ...*cloudresourcemanager.Binding) []*cloudresourcemanager.Binding { + currMap := createIamBindingsMap(bindings) + toRemoveMap := createIamBindingsMap(toRemove) + + for key, removeSet := range toRemoveMap { + members, ok := currMap[key] + if !ok { + continue + } + // Remove all removed members + for m := range removeSet { + delete(members, m) + } + // Remove role+condition from bindings + if len(members) == 0 { + delete(currMap, key) + } + } + + return listFromIamBindingMap(currMap) +} + +func iamMemberIsCaseSensitive(member string) bool { + // allAuthenticatedUsers and allUsers are special identifiers that are case sensitive. 
See:
+	// https://cloud.google.com/iam/docs/overview#all-authenticated-users
+	return strings.Contains(member, "allAuthenticatedUsers") || strings.Contains(member, "allUsers") ||
+		strings.HasPrefix(member, "principalSet:") || strings.HasPrefix(member, "principal:") ||
+		strings.HasPrefix(member, "principalHierarchy:")
+}
+
+// normalizeIamMemberCasing returns the case adjusted value of an iamMember
+// this is important as iam will ignore casing unless it is one of the following
+// member types: principalSet, principal, principalHierarchy
+// members are in <type>:<value> format
+// <type> is case sensitive
+// <value> isn't in most cases
+// so lowercase the value unless iamMemberIsCaseSensitive and leave the type alone
+// since Dec '19 members can be prefixed with "deleted:" to indicate the principal
+// has been deleted
+func normalizeIamMemberCasing(member string) string {
+	var pieces []string
+	if strings.HasPrefix(member, "deleted:") {
+		pieces = strings.SplitN(member, ":", 3)
+		if len(pieces) > 2 && !iamMemberIsCaseSensitive(strings.TrimPrefix(member, "deleted:")) {
+			pieces[2] = strings.ToLower(pieces[2])
+		}
+	} else if strings.HasPrefix(member, "iamMember:") {
+		pieces = strings.SplitN(member, ":", 3)
+		if len(pieces) > 2 && !iamMemberIsCaseSensitive(strings.TrimPrefix(member, "iamMember:")) {
+			pieces[2] = strings.ToLower(pieces[2])
+		}
+	} else if !iamMemberIsCaseSensitive(member) {
+		pieces = strings.SplitN(member, ":", 2)
+		if len(pieces) > 1 {
+			pieces[1] = strings.ToLower(pieces[1])
+		}
+	}
+
+	if len(pieces) > 0 {
+		member = strings.Join(pieces, ":")
+	}
+	return member
+}
+
+// Construct map of role to set of members from list of bindings.
+func createIamBindingsMap(bindings []*cloudresourcemanager.Binding) map[iamBindingKey]map[string]struct{} {
+	bm := make(map[iamBindingKey]map[string]struct{})
+	// Get each binding
+	for _, b := range bindings {
+		members := make(map[string]struct{})
+		key := iamBindingKey{b.Role, conditionKeyFromCondition(b.Condition)}
+		// Initialize members map
+		if _, ok := bm[key]; ok {
+			members = bm[key]
+		}
+		// Get each member (user/principal) for the binding
+		for _, m := range b.Members {
+			m = normalizeIamMemberCasing(m)
+			// Add the member
+			members[m] = struct{}{}
+		}
+		if len(members) > 0 {
+			bm[key] = members
+		} else {
+			delete(bm, key)
+		}
+	}
+	return bm
+}
+
+// Return list of Bindings for a map of role to member sets
+func listFromIamBindingMap(bm map[iamBindingKey]map[string]struct{}) []*cloudresourcemanager.Binding {
+	rb := make([]*cloudresourcemanager.Binding, 0, len(bm))
+	var keys []iamBindingKey
+	for k := range bm {
+		keys = append(keys, k)
+	}
+	// Sort keys so the resulting binding list is deterministic.
+	sort.Slice(keys, func(i, j int) bool {
+		keyI := keys[i]
+		keyJ := keys[j]
+		return fmt.Sprintf("%s%s", keyI.Role, keyI.Condition.String()) < fmt.Sprintf("%s%s", keyJ.Role, keyJ.Condition.String())
+	})
+	for _, key := range keys {
+		members := bm[key]
+		if len(members) == 0 {
+			continue
+		}
+		b := &cloudresourcemanager.Binding{
+			Role:    key.Role,
+			Members: tpgresource.StringSliceFromGolangSet(members),
+		}
+		if !key.Condition.Empty() {
+			b.Condition = &cloudresourcemanager.Expr{
+				Description: key.Condition.Description,
+				Expression:  key.Condition.Expression,
+				Title:       key.Condition.Title,
+			}
+		}
+		rb = append(rb, b)
+	}
+	return rb
+}
+
+// Removes all AuditConfig entries for the given service from the list.
+func removeAllAuditConfigsWithService(ac []*cloudresourcemanager.AuditConfig, service string) []*cloudresourcemanager.AuditConfig {
+	acMap := createIamAuditConfigsMap(ac)
+	delete(acMap, service)
+	return listFromIamAuditConfigMap(acMap)
+}
+
+// Build an AuditConfig service to audit log
config map +func createIamAuditConfigsMap(auditConfigs []*cloudresourcemanager.AuditConfig) map[string]map[string]map[string]struct{} { + acMap := make(map[string]map[string]map[string]struct{}) + + for _, ac := range auditConfigs { + if _, ok := acMap[ac.Service]; !ok { + acMap[ac.Service] = make(map[string]map[string]struct{}) + } + alcMap := acMap[ac.Service] + for _, alc := range ac.AuditLogConfigs { + if _, ok := alcMap[alc.LogType]; !ok { + alcMap[alc.LogType] = make(map[string]struct{}) + } + memberMap := alcMap[alc.LogType] + // Add members to map for log type. + for _, m := range alc.ExemptedMembers { + memberMap[m] = struct{}{} + } + } + } + + return acMap +} + +// Construct list of AuditConfigs from audit config maps. +func listFromIamAuditConfigMap(acMap map[string]map[string]map[string]struct{}) []*cloudresourcemanager.AuditConfig { + ac := make([]*cloudresourcemanager.AuditConfig, 0, len(acMap)) + + for service, logConfigMap := range acMap { + if len(logConfigMap) == 0 { + continue + } + + logConfigs := make([]*cloudresourcemanager.AuditLogConfig, 0, len(logConfigMap)) + for logType, memberSet := range logConfigMap { + alc := &cloudresourcemanager.AuditLogConfig{ + LogType: logType, + ForceSendFields: []string{"exemptedMembers"}, + } + if len(memberSet) > 0 { + alc.ExemptedMembers = tpgresource.StringSliceFromGolangSet(memberSet) + } + logConfigs = append(logConfigs, alc) + } + + ac = append(ac, &cloudresourcemanager.AuditConfig{ + Service: service, + AuditLogConfigs: logConfigs, + }) + } + return ac +} + +func jsonPolicyDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + if old == "" && new == "" { + return true + } + + var oldPolicy, newPolicy cloudresourcemanager.Policy + if old != "" && new != "" { + if err := json.Unmarshal([]byte(old), &oldPolicy); err != nil { + log.Printf("[ERROR] Could not unmarshal old policy %s: %v", old, err) + return false + } + if err := json.Unmarshal([]byte(new), &newPolicy); err != nil { + 
log.Printf("[ERROR] Could not unmarshal new policy %s: %v", new, err) + return false + } + + return compareIamPolicies(&newPolicy, &oldPolicy) + } + + return false +} + +func compareIamPolicies(a, b *cloudresourcemanager.Policy) bool { + if a.Etag != b.Etag { + log.Printf("[DEBUG] policies etag differ: %q vs %q", a.Etag, b.Etag) + return false + } + if a.Version != b.Version { + log.Printf("[DEBUG] policies version differ: %q vs %q", a.Version, b.Version) + return false + } + if !CompareBindings(a.Bindings, b.Bindings) { + log.Printf("[DEBUG] policies bindings differ: %#v vs %#v", a.Bindings, b.Bindings) + return false + } + if !CompareAuditConfigs(a.AuditConfigs, b.AuditConfigs) { + log.Printf("[DEBUG] policies audit configs differ: %#v vs %#v", a.AuditConfigs, b.AuditConfigs) + return false + } + return true +} + +func CompareBindings(a, b []*cloudresourcemanager.Binding) bool { + aMap := createIamBindingsMap(a) + bMap := createIamBindingsMap(b) + return reflect.DeepEqual(aMap, bMap) +} + +func CompareAuditConfigs(a, b []*cloudresourcemanager.AuditConfig) bool { + aMap := createIamAuditConfigsMap(a) + bMap := createIamAuditConfigsMap(b) + return reflect.DeepEqual(aMap, bMap) +} + +type IamSettings struct { + DeprecationMessage string + EnableBatching bool +} + +func NewIamSettings(options ...func(*IamSettings)) *IamSettings { + settings := &IamSettings{} + for _, o := range options { + o(settings) + } + return settings +} + +func IamWithDeprecationMessage(message string) func(s *IamSettings) { + return func(s *IamSettings) { + s.DeprecationMessage = message + } +} + +func IamWithGAResourceDeprecation() func (s *IamSettings) { + {{- if eq $.TargetVersionName "ga" }} + return IamWithDeprecationMessage("This resource has been deprecated in the google (GA) provider, and will only be available in the google-beta provider in a future release.") + {{- else }} + return IamWithDeprecationMessage("") + {{- end }} +} + +func IamWithBatching (s *IamSettings) { + 
s.EnableBatching = true +} + +// Util to deref and print auditConfigs +func DebugPrintAuditConfigs(bs []*cloudresourcemanager.AuditConfig) string { + v, _ := json.MarshalIndent(bs, "", "\t") + return string(v) +} + +// Util to deref and print bindings +func DebugPrintBindings(bs []*cloudresourcemanager.Binding) string { + v, _ := json.MarshalIndent(bs, "", "\t") + return string(v) +} + +// Returns a map representing iam bindings that are in the first map but not the second. +func missingBindingsMap(aMap, bMap map[iamBindingKey]map[string]struct{}) map[iamBindingKey]map[string]struct{} { + results := make(map[iamBindingKey]map[string]struct{}) + for key, aMembers := range aMap { + if bMembers, ok := bMap[key]; ok { + // The key is in both maps. + resultMembers := make(map[string]struct{}) + + for aMember := range aMembers { + if _, ok := bMembers[aMember]; !ok { + // The member is in a but not in b. + resultMembers[aMember] = struct{}{} + } + } + for bMember := range bMembers { + if _, ok := aMembers[bMember]; !ok { + // The member is in b but not in a. + resultMembers[bMember] = struct{}{} + } + } + + if len(resultMembers) > 0 { + results[key] = resultMembers + } + } else { + // The key is in map a but not map b. + results[key] = aMembers + } + } + + for key, bMembers := range bMap { + if _, ok := aMap[key]; !ok { + // The key is in map b but not map a. + results[key] = bMembers + } + } + + return results +} + +// Returns the bindings that are in the first set of bindings but not the second. 
+func MissingBindings(a, b []*cloudresourcemanager.Binding) []*cloudresourcemanager.Binding { + aMap := createIamBindingsMap(a) + bMap := createIamBindingsMap(b) + + var results []*cloudresourcemanager.Binding + for key, membersSet := range missingBindingsMap(aMap, bMap) { + members := make([]string, 0, len(membersSet)) + for member := range membersSet { + members = append(members, member) + } + results = append(results, &cloudresourcemanager.Binding{ + Role: key.Role, + Members: members, + }) + } + return results +} diff --git a/mmv1/third_party/terraform/tpgiamresource/go/iam_test.go.tmpl b/mmv1/third_party/terraform/tpgiamresource/go/iam_test.go.tmpl new file mode 100644 index 000000000000..dbf666ea50e4 --- /dev/null +++ b/mmv1/third_party/terraform/tpgiamresource/go/iam_test.go.tmpl @@ -0,0 +1,1333 @@ +package tpgiamresource + +import ( + "reflect" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "google.golang.org/api/cloudresourcemanager/v1" +) + +func TestIamMergeBindings(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.Binding + expect []*cloudresourcemanager.Binding + }{ + // Nothing to merge - return same list + { + input: []*cloudresourcemanager.Binding{}, + expect: []*cloudresourcemanager.Binding{}, + }, + // No members returns no binding + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + }, + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + }, + // Nothing to merge - return same list + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + }, + }, + // Nothing to merge - return same list + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + { + 
Role: "role-2", + Members: []string{"member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + { + Role: "role-1", + Members: []string{"member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-1", + Members: []string{"member-3"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2", "member-3"}, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-3", "member-4"}, + }, + { + Role: "role-1", + Members: []string{"member-2", "member-1"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-1", + Members: []string{"member-5"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + { + Role: "role-2", + Members: []string{"member-2"}, + }, + {Role: "empty-role", Members: []string{}}, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2", "member-3", "member-4", "member-5"}, + }, + { + Role: "role-2", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + }, + }, +{{- if ne $.TargetVersionName "ga" }} + // Same role+members, different condition + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + expect: 
[]*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + }, + // Same role, same condition + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + { + Role: "role-1", + Members: []string{"member-3"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2", "member-3"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + }, + // Different roles, same condition + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + { + Role: "role-2", + Members: []string{"member-3"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + { + Role: "role-2", + Members: []string{"member-3"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + }, + }, + }, + }, +{{- end }} + } + + for _, tc := range testCases { + got := MergeBindings(tc.input) + if !CompareBindings(got, tc.expect) { + t.Errorf("Unexpected value for MergeBindings(%s).\nActual: %s\nExpected: %s\n", + DebugPrintBindings(tc.input), DebugPrintBindings(got), DebugPrintBindings(tc.expect)) + } + } +} + +func TestIamFilterBindingsWithRoleAndCondition(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.Binding + role string + conditionTitle string + expect 
[]*cloudresourcemanager.Binding + }{ + // No-op + { + input: []*cloudresourcemanager.Binding{}, + role: "role-1", + expect: []*cloudresourcemanager.Binding{}, + }, + // Remove one binding + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + role: "role-1", + expect: []*cloudresourcemanager.Binding{}, + }, + // Remove multiple bindings + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-1", + Members: []string{"member-3"}, + }, + }, + role: "role-1", + expect: []*cloudresourcemanager.Binding{}, + }, + // Remove multiple bindings and leave some. + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1", "member-3"}, + }, + { + Role: "role-1", + Members: []string{"member-2"}, + }, + { + Role: "role-2", + Members: []string{"member-1", "member-2"}, + }, + }, + role: "role-1", + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1", "member-3"}, + }, + { + Role: "role-2", + Members: []string{"member-1", "member-2"}, + }, + }, + }, +{{- if ne $.TargetVersionName "ga" }} + // Remove one binding with condition + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + { + Role: "role-1", + Members: []string{"member-3", "member-4"}, + Condition: &cloudresourcemanager.Expr{Title: "condition-1"}, + }, + }, + role: "role-1", + conditionTitle: "condition-1", + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + }, +{{- end }} + } + + for _, tc := range testCases { + got := filterBindingsWithRoleAndCondition(tc.input, tc.role, 
&cloudresourcemanager.Expr{Title: tc.conditionTitle}) + if !CompareBindings(got, tc.expect) { + t.Errorf("Got unexpected value for removeAllBindingsWithRole(%s, %s).\nActual: %s\nExpected: %s", + DebugPrintBindings(tc.input), tc.role, DebugPrintBindings(got), DebugPrintBindings(tc.expect)) + } + } +} + +func TestIamSubtractFromBindings(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.Binding + remove []*cloudresourcemanager.Binding + expect []*cloudresourcemanager.Binding + }{ + { + input: []*cloudresourcemanager.Binding{}, + remove: []*cloudresourcemanager.Binding{}, + expect: []*cloudresourcemanager.Binding{}, + }, + // Empty input should no-op return empty + { + input: []*cloudresourcemanager.Binding{}, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{}, + }, + // Empty removal should return original expect + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + remove: []*cloudresourcemanager.Binding{}, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + }, + // Removal not in input should no-op + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-1+"}, + }, + }, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-1+"}, + }, + }, + }, + // Same input/remove should return empty + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{}, + }, + // Single 
removal + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-2"}, + }, + }, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-2"}, + }, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-2", "member-3"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-1", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-2", "member-4"}, + }, + { + Role: "role-2", + Members: []string{"member-2"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-3"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + }, + }, +{{- if ne $.TargetVersionName "ga" }} + // With conditions + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-2", "member-3"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-1", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + Condition: &cloudresourcemanager.Expr{Title: "condition-1"}, + }, + }, + remove: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-2", "member-4"}, + }, + { + Role: "role-2", + Members: []string{"member-1"}, + Condition: &cloudresourcemanager.Expr{Title: "condition-1"}, + }, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"member-1", "member-3"}, + }, + { + Role: 
"role-2", + Members: []string{"member-1"}, + }, + { + Role: "role-3", + Members: []string{"member-1"}, + }, + }, + }, +{{- end }} + } + + for _, tc := range testCases { + got := subtractFromBindings(tc.input, tc.remove...) + if !CompareBindings(got, tc.expect) { + t.Errorf("Unexpected value for subtractFromBindings(%s, %s).\nActual: %s\nExpected: %s\n", + DebugPrintBindings(tc.input), DebugPrintBindings(tc.remove), DebugPrintBindings(got), DebugPrintBindings(tc.expect)) + } + } +} + +func TestIamCreateIamBindingsMap(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.Binding + expect map[iamBindingKey]map[string]struct{} + }{ + { + input: []*cloudresourcemanager.Binding{}, + expect: map[iamBindingKey]map[string]struct{}{}, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + }, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + { + Role: "role-1", + Members: []string{"user-3"}, + }, + }, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + { + Role: "role-2", + Members: []string{"user-1"}, + }, + }, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-2", conditionKey{}}: {"user-1": {}}, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + { + Role: "role-2", + Members: []string{"user-1"}, + }, + { + Role: "role-1", + Members: []string{"user-3"}, + }, + { + Role: "role-2", + Members: []string{"user-2"}, + }, + { + Role: "role-3", + Members: []string{"user-3"}, + }, + 
}, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, + {"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-3", conditionKey{}}: {"user-3": {}}, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"deleted:serviceAccount:useR-1", "user-2"}, + }, + { + Role: "role-2", + Members: []string{"deleted:user:user-1"}, + }, + { + Role: "role-1", + Members: []string{"serviceAccount:user-3"}, + }, + { + Role: "role-2", + Members: []string{"user-2"}, + }, + { + Role: "role-3", + Members: []string{"user-3"}, + }, + { + Role: "role-4", + Members: []string{"deleted:principal:useR-1"}, + }, + }, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"deleted:serviceAccount:user-1": {}, "user-2": {}, "serviceAccount:user-3": {}}, + {"role-2", conditionKey{}}: {"deleted:user:user-1": {}, "user-2": {}}, + {"role-3", conditionKey{}}: {"user-3": {}}, + {"role-4", conditionKey{}}: {"deleted:principal:useR-1": {}}, + }, + }, + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"principalSet://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools/example-pool/attribute.aws_role/arn:aws:sts::999999999999:assumed-role/some-eu-central-1-lambdaRole"}, + }, + { + Role: "role-2", + Members: []string{"principal://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools/example-pool/attribute.aws_role/arn:aws:sts::999999999999:assumed-role/some-eu-central-1-lambdaRole"}, + }, + { + Role: "role-1", + Members: []string{"serviceAccount:useR-3"}, + }, + { + Role: "role-2", + Members: []string{"user-2"}, + }, + { + Role: "role-3", + Members: []string{"user-3"}, + }, + { + Role: "role-3", + Members: []string{"principalHierarchy://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools"}, + }, + }, + expect: 
map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"principalSet://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools/example-pool/attribute.aws_role/arn:aws:sts::999999999999:assumed-role/some-eu-central-1-lambdaRole": {}, "serviceAccount:user-3": {}}, + {"role-2", conditionKey{}}: {"principal://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools/example-pool/attribute.aws_role/arn:aws:sts::999999999999:assumed-role/some-eu-central-1-lambdaRole": {}, "user-2": {}}, + {"role-3", conditionKey{}}: {"principalHierarchy://iam.googleapis.com/projects/1066737951711/locations/global/workloadIdentityPools": {}, "user-3": {}}, + }, + }, +{{- if ne $.TargetVersionName "ga" }} + { + input: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + { + Role: "role-2", + Members: []string{"user-1"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + Description: "condition-1-desc", + Expression: "condition-1-expr", + }, + }, + { + Role: "role-2", + Members: []string{"user-2"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-1", + Description: "condition-1-desc", + Expression: "condition-1-expr", + }, + }, + { + Role: "role-2", + Members: []string{"user-1"}, + Condition: &cloudresourcemanager.Expr{ + Title: "condition-2", + Description: "condition-2-desc", + Expression: "condition-2-expr", + }, + }, + }, + expect: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + { + Role: "role-2", + Condition: conditionKey{ + Title: "condition-1", + Description: "condition-1-desc", + Expression: "condition-1-expr", + }, + }: {"user-1": {}, "user-2": {}}, + { + Role: "role-2", + Condition: conditionKey{ + Title: "condition-2", + Description: "condition-2-desc", + Expression: "condition-2-expr", + }, + }: {"user-1": {}}, + }, + }, +{{- end }} + } + + for _, tc := range testCases { + got := 
createIamBindingsMap(tc.input) + if !reflect.DeepEqual(got, tc.expect) { + t.Errorf("Unexpected value for createIamBindingsMap(%s).\nActual: %#v\nExpected: %#v\n", + DebugPrintBindings(tc.input), got, tc.expect) + } + } +} + +func TestIamMember_MemberDiffSuppress(t *testing.T) { + type IamMemberTestcase struct { + name string + old string + new string + equal bool + } + var iamMemberTestcases = []IamMemberTestcase{ + { + name: "control", + old: "somevalue", + new: "somevalue", + equal: true, + }, + { + name: "principal same casing", + old: "principal:someValueHere", + new: "principal:someValueHere", + equal: true, + }, + { + name: "principal not same casing", + old: "principal:somevalueHere", + new: "principal:someValuehere", + equal: false, + }, + { + name: "principalSet same casing", + old: "principalSet:someValueHere", + new: "principalSet:someValueHere", + equal: true, + }, + { + name: "principalSet not same casing", + old: "principalSet:somevalueHere", + new: "principalSet:someValuehere", + equal: false, + }, + { + name: "principalHierarchy same casing", + old: "principalHierarchy:someValueHere", + new: "principalHierarchy:someValueHere", + equal: true, + }, + { + name: "principalHierarchy not same casing", + old: "principalHierarchy:somevalueHere", + new: "principalHierarchy:someValuehere", + equal: false, + }, + { + name: "serviceAccount same casing", + old: "serviceAccount:same@case.com", + new: "serviceAccount:same@case.com", + equal: true, + }, + { + name: "serviceAccount diff casing", + old: "serviceAccount:sAme@casE.com", + new: "serviceAccount:same@case.com", + equal: true, + }, + { + name: "random diff", + old: "serviasfsfljJKLSD", + new: "servicsFDJKLSFJdfjdlkfsf", + equal: false, + }, + } + + for _, testcase := range iamMemberTestcases { + areEqual := iamMemberCaseDiffSuppress("", testcase.old, testcase.new, &schema.ResourceData{}) + if areEqual != testcase.equal { + t.Errorf("Testcase %s failed: expected equality to be %t but got %t", 
testcase.name, testcase.equal, areEqual) + } + } +} + +func TestIamListFromIamBindingMap(t *testing.T) { + testCases := []struct { + input map[iamBindingKey]map[string]struct{} + expect []*cloudresourcemanager.Binding + }{ + { + input: map[iamBindingKey]map[string]struct{}{}, + expect: []*cloudresourcemanager.Binding{}, + }, + { + input: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + }, + }, + { + input: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}}, + {"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1"}, + }, + { + Role: "role-2", + Members: []string{"user-1", "user-2"}, + }, + }, + }, + { + input: map[iamBindingKey]map[string]struct{}{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-2", conditionKey{}}: {}, + }, + expect: []*cloudresourcemanager.Binding{ + { + Role: "role-1", + Members: []string{"user-1", "user-2"}, + }, + }, + }, + } + + for _, tc := range testCases { + got := listFromIamBindingMap(tc.input) + if !CompareBindings(got, tc.expect) { + t.Errorf("Unexpected value for subtractFromBindings(%v).\nActual: %#v\nExpected: %#v\n", + tc.input, DebugPrintBindings(got), DebugPrintBindings(tc.expect)) + } + } +} + +func TestIamRemoveAllAuditConfigsWithService(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.AuditConfig + service string + expect []*cloudresourcemanager.AuditConfig + }{ + // No-op + { + service: "foo.googleapis.com", + input: []*cloudresourcemanager.AuditConfig{}, + expect: []*cloudresourcemanager.AuditConfig{}, + }, + // No-op - service not in audit configs + { + service: "bar.googleapis.com", + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + 
AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + }, + expect: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + }, + }, + // Single removal + { + service: "foo.googleapis.com", + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + }, + expect: []*cloudresourcemanager.AuditConfig{}, + }, + // Multiple removal/merge + { + service: "kms.googleapis.com", + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + { + Service: "iam.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-2"}, + }, + }, + }, + { + Service: "iam.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-2"}, + }, + }, + }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-3", "user-4"}, + }, + { + LogType: "DATA_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + }, + }, + }, + expect: []*cloudresourcemanager.AuditConfig{ + { + Service: 
"iam.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + }, + }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + }, + }, + } + + for _, tc := range testCases { + got := removeAllAuditConfigsWithService(tc.input, tc.service) + if !CompareAuditConfigs(got, tc.expect) { + t.Errorf("Got unexpected value for removeAllAuditConfigsWithService(%s, %s).\nActual: %s\nExpected: %s", + DebugPrintAuditConfigs(tc.input), tc.service, DebugPrintAuditConfigs(got), DebugPrintAuditConfigs(tc.expect)) + } + } +} + +func TestIamCreateIamAuditConfigsMap(t *testing.T) { + testCases := []struct { + input []*cloudresourcemanager.AuditConfig + expect map[string]map[string]map[string]struct{} + }{ + { + input: []*cloudresourcemanager.AuditConfig{}, + expect: make(map[string]map[string]map[string]struct{}), + }, + { + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + }, + expect: map[string]map[string]map[string]struct{}{ + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{}, + }, + }, + }, + { + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + }, + expect: map[string]map[string]map[string]struct{}{ + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + "DATA_WRITE": map[string]struct{}{"user-1": {}}, + }, + }, + }, + { + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: 
[]*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_READ", + ExemptedMembers: []string{"user-2"}, + }, + }, + }, + }, + expect: map[string]map[string]map[string]struct{}{ + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + "DATA_WRITE": map[string]struct{}{"user-1": {}}, + "DATA_READ": map[string]struct{}{"user-2": {}}, + }, + }, + }, + { + input: []*cloudresourcemanager.AuditConfig{ + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + }, + }, + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + }, + }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "DATA_READ", + ExemptedMembers: []string{"user-2"}, + }, + }, + }, + }, + expect: map[string]map[string]map[string]struct{}{ + "kms.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + }, + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + "DATA_WRITE": map[string]struct{}{"user-1": {}}, + "DATA_READ": map[string]struct{}{"user-2": {}}, + }, + }, + }, + } + + for _, tc := range testCases { + got := createIamAuditConfigsMap(tc.input) + if !reflect.DeepEqual(got, tc.expect) { + t.Errorf("Unexpected value for 
createIamAuditConfigsMap(%s).\nActual: %#v\nExpected: %#v\n", + DebugPrintAuditConfigs(tc.input), got, tc.expect) + } + } +} + +func TestIamListFromIamAuditConfigsMap(t *testing.T) { + testCases := []struct { + input map[string]map[string]map[string]struct{} + expect []*cloudresourcemanager.AuditConfig + }{ + { + input: make(map[string]map[string]map[string]struct{}), + expect: []*cloudresourcemanager.AuditConfig{}, + }, + { + input: map[string]map[string]map[string]struct{}{ + "foo.googleapis.com": {"ADMIN_READ": map[string]struct{}{}}, + }, + expect: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + }, + }, + }, + }, + { + input: map[string]map[string]map[string]struct{}{ + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + "DATA_WRITE": map[string]struct{}{"user-1": {}}, + "DATA_READ": map[string]struct{}{}, + }, + }, + expect: []*cloudresourcemanager.AuditConfig{ + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + { + LogType: "DATA_READ", + }, + }, + }, + }, + }, + { + input: map[string]map[string]map[string]struct{}{ + "kms.googleapis.com": { + "ADMIN_READ": map[string]struct{}{}, + "DATA_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + }, + "foo.googleapis.com": { + "ADMIN_READ": map[string]struct{}{"user-1": {}, "user-2": {}}, + "DATA_WRITE": map[string]struct{}{"user-1": {}}, + "DATA_READ": map[string]struct{}{"user-2": {}}, + }, + }, + expect: []*cloudresourcemanager.AuditConfig{ + { + Service: "kms.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + }, + { + LogType: "DATA_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + }, 
+ }, + { + Service: "foo.googleapis.com", + AuditLogConfigs: []*cloudresourcemanager.AuditLogConfig{ + { + LogType: "ADMIN_READ", + ExemptedMembers: []string{"user-1", "user-2"}, + }, + { + LogType: "DATA_WRITE", + ExemptedMembers: []string{"user-1"}, + }, + { + LogType: "DATA_READ", + ExemptedMembers: []string{"user-2"}, + }, + }, + }, + }, + }, + } + + for _, tc := range testCases { + got := listFromIamAuditConfigMap(tc.input) + if !CompareAuditConfigs(got, tc.expect) { + t.Errorf("Unexpected value for listFromIamAuditConfigMap(%+v).\nActual: %s\nExpected: %s\n", + tc.input, DebugPrintAuditConfigs(got), DebugPrintAuditConfigs(tc.expect)) + } + } +} diff --git a/mmv1/third_party/terraform/tpgiamresource/iam_test.go.erb b/mmv1/third_party/terraform/tpgiamresource/iam_test.go.erb index 0a058746f456..687f64293a54 100644 --- a/mmv1/third_party/terraform/tpgiamresource/iam_test.go.erb +++ b/mmv1/third_party/terraform/tpgiamresource/iam_test.go.erb @@ -598,7 +598,7 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, }, }, { @@ -613,7 +613,7 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, }, }, { @@ -628,8 +628,8 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, - iamBindingKey{"role-2", conditionKey{}}: {"user-1": {}}, + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-2", conditionKey{}}: {"user-1": {}}, }, }, { @@ -656,9 +656,9 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: 
map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, - iamBindingKey{"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, - iamBindingKey{"role-3", conditionKey{}}: {"user-3": {}}, + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}, "user-3": {}}, + {"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-3", conditionKey{}}: {"user-3": {}}, }, }, { @@ -689,10 +689,10 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"deleted:serviceAccount:user-1": {}, "user-2": {}, "serviceAccount:user-3": {}}, - iamBindingKey{"role-2", conditionKey{}}: {"deleted:user:user-1": {}, "user-2": {}}, - iamBindingKey{"role-3", conditionKey{}}: {"user-3": {}}, - iamBindingKey{"role-4", conditionKey{}}: {"deleted:principal:useR-1": {}}, + {"role-1", conditionKey{}}: {"deleted:serviceAccount:user-1": {}, "user-2": {}, "serviceAccount:user-3": {}}, + {"role-2", conditionKey{}}: {"deleted:user:user-1": {}, "user-2": {}}, + {"role-3", conditionKey{}}: {"user-3": {}}, + {"role-4", conditionKey{}}: {"deleted:principal:useR-1": {}}, }, }, { @@ -764,8 +764,8 @@ func TestIamCreateIamBindingsMap(t *testing.T) { }, }, expect: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, - iamBindingKey{ + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + { Role: "role-2", Condition: conditionKey{ Title: "condition-1", @@ -773,7 +773,7 @@ func TestIamCreateIamBindingsMap(t *testing.T) { Expression: "condition-1-expr", }, }: {"user-1": {}, "user-2": {}}, - iamBindingKey{ + { Role: "role-2", Condition: conditionKey{ Title: "condition-2", @@ -884,7 +884,7 @@ func TestIamListFromIamBindingMap(t *testing.T) { }, { input: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-1", 
conditionKey{}}: {"user-1": {}, "user-2": {}}, }, expect: []*cloudresourcemanager.Binding{ { @@ -895,8 +895,8 @@ func TestIamListFromIamBindingMap(t *testing.T) { }, { input: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}}, - iamBindingKey{"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-1", conditionKey{}}: {"user-1": {}}, + {"role-2", conditionKey{}}: {"user-1": {}, "user-2": {}}, }, expect: []*cloudresourcemanager.Binding{ { @@ -911,8 +911,8 @@ func TestIamListFromIamBindingMap(t *testing.T) { }, { input: map[iamBindingKey]map[string]struct{}{ - iamBindingKey{"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, - iamBindingKey{"role-2", conditionKey{}}: {}, + {"role-1", conditionKey{}}: {"user-1": {}, "user-2": {}}, + {"role-2", conditionKey{}}: {}, }, expect: []*cloudresourcemanager.Binding{ { diff --git a/mmv1/third_party/terraform/tpgresource/go/common_diff_suppress.go.tmpl b/mmv1/third_party/terraform/tpgresource/go/common_diff_suppress.go.tmpl new file mode 100644 index 000000000000..acf087a314f0 --- /dev/null +++ b/mmv1/third_party/terraform/tpgresource/go/common_diff_suppress.go.tmpl @@ -0,0 +1,315 @@ +// Contains common diff suppress functions. 
+ +package tpgresource + +import ( + "crypto/sha256" + "log" + "encoding/hex" + "net" + "reflect" + "regexp" + "strconv" + "strings" + "time" + "bytes" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func OptionalPrefixSuppress(prefix string) schema.SchemaDiffSuppressFunc { + return func(k, old, new string, d *schema.ResourceData) bool { + return prefix+old == new || prefix+new == old + } +} + +func IgnoreMissingKeyInMap(key string) schema.SchemaDiffSuppressFunc { + return func(k, old, new string, d *schema.ResourceData) bool { + log.Printf("[DEBUG] - suppressing diff %q with old %q, new %q", k, old, new) + if strings.HasSuffix(k, ".%") { + oldNum, err := strconv.Atoi(old) + if err != nil { + log.Printf("[ERROR] could not parse %q as number, no longer attempting diff suppress", old) + return false + } + newNum, err := strconv.Atoi(new) + if err != nil { + log.Printf("[ERROR] could not parse %q as number, no longer attempting diff suppress", new) + return false + } + return oldNum+1 == newNum + } else if strings.HasSuffix(k, "." + key) { + return old == "" + } + return false + } +} + +func OptionalSurroundingSpacesSuppress(k, old, new string, d *schema.ResourceData) bool { + return strings.TrimSpace(old) == strings.TrimSpace(new) +} + +func EmptyOrDefaultStringSuppress(defaultVal string) schema.SchemaDiffSuppressFunc { + return func(k, old, new string, d *schema.ResourceData) bool { + return (old == "" && new == defaultVal) || (new == "" && old == defaultVal) + } +} + +func EmptyOrFalseSuppressBoolean(k, old, new string, d *schema.ResourceData) bool { + o, n := d.GetChange(k) + return (o == nil && !n.(bool)) +} + +func IpCidrRangeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + // The range may be a: + // A) single IP address (e.g. 10.2.3.4) + // B) CIDR format string (e.g. 10.1.2.0/24) + // C) netmask (e.g. /24) + // + // For A) and B), no diff to suppress, they have to match completely. 
+ // For C), The API picks a network IP address and this creates a diff of the form: + // network_interface.0.alias_ip_range.0.ip_cidr_range: "10.128.1.0/24" => "/24" + // We should only compare the mask portion for this case. + if len(new) > 0 && new[0] == '/' { + oldNetmaskStartPos := strings.LastIndex(old, "/") + + if oldNetmaskStartPos != -1 { + oldNetmask := old[strings.LastIndex(old, "/"):] + if oldNetmask == new { + return true + } + } + } + + return false +} + +// Sha256DiffSuppress +// if old is the hex-encoded sha256 sum of new, treat them as equal +func Sha256DiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + return hex.EncodeToString(sha256.New().Sum([]byte(old))) == new +} + +func CaseDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + return strings.ToUpper(old) == strings.ToUpper(new) +} + +// Port range '80' and '80-80' is equivalent. +// `old` is read from the server and always has the full range format (e.g. '80-80', '1024-2048'). +// `new` can be either a single port or a port range. +func PortRangeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + return old == new+"-"+new +} + +// Single-digit hour is equivalent to hour with leading zero e.g. suppress diff 1:00 => 01:00. +// Assume either value could be in either format. +func Rfc3339TimeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + if (len(old) == 4 && "0"+old == new) || (len(new) == 4 && "0"+new == old) { + return true + } + return false +} + +func EmptyOrUnsetBlockDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + o, n := d.GetChange(strings.TrimSuffix(k, ".#")) + return EmptyOrUnsetBlockDiffSuppressLogic(k, old, new, o, n) +} + +// The core logic for EmptyOrUnsetBlockDiffSuppress, in a format that is more conducive +// to unit testing. 
+func EmptyOrUnsetBlockDiffSuppressLogic(k, old, new string, o, n interface{}) bool { + if !strings.HasSuffix(k, ".#") { + return false + } + var l []interface{} + if old == "0" && new == "1" { + l = n.([]interface{}) + } else if new == "0" && old == "1" { + l = o.([]interface{}) + } else { + // we don't have one set and one unset, so don't suppress the diff + return false + } + + contents, ok := l[0].(map[string]interface{}) + if !ok { + return false + } + for _, v := range contents { + if !IsEmptyValue(reflect.ValueOf(v)) { + return false + } + } + return true +} + +// Suppress diffs for values that are equivalent except for their use of the words "location" +// compared to "region" or "zone" +func LocationDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + return LocationDiffSuppressHelper(old, new) || LocationDiffSuppressHelper(new, old) +} + +func LocationDiffSuppressHelper(a, b string) bool { + return strings.Replace(a, "/locations/", "/regions/", 1) == b || + strings.Replace(a, "/locations/", "/zones/", 1) == b +} + +// For managed SSL certs, if new is an absolute FQDN (trailing '.') but old isn't, treat them as equals. +func AbsoluteDomainSuppress(k, old, new string, _ *schema.ResourceData) bool { + if strings.HasPrefix(k, "managed.0.domains.") { + return old == strings.TrimRight(new, ".") || new == strings.TrimRight(old, ".") + } + return false +} + +func TimestampDiffSuppress(format string) schema.SchemaDiffSuppressFunc { + return func(_, old, new string, _ *schema.ResourceData) bool { + oldT, err := time.Parse(format, old) + if err != nil { + return false + } + + newT, err := time.Parse(format, new) + if err != nil { + return false + } + + return oldT == newT + } +} + +// Suppresses diff for IPv4 and IPv6 different formats. +// It also suppresses diffs if an IP is changing to a reference. 
+func InternalIpDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + addr_equality := false + netmask_equality := false + + addr_netmask_old := strings.Split(old, "/") + addr_netmask_new := strings.Split(new, "/") + + // Check if old or new are IPs (with or without netmask) + var addr_old net.IP + if net.ParseIP(addr_netmask_old[0]) == nil { + addr_old = net.ParseIP(old) + } else { + addr_old = net.ParseIP(addr_netmask_old[0]) + } + var addr_new net.IP + if net.ParseIP(addr_netmask_new[0]) == nil { + addr_new = net.ParseIP(new) + } else { + addr_new = net.ParseIP(addr_netmask_new[0]) + } + + if addr_old != nil { + if addr_new == nil { + // old is an IP and new is a reference + addr_equality = true + } else { + // old and new are IP addresses + addr_equality = bytes.Equal(addr_old, addr_new) + } + } + + // If old and new both have a netmask compare them, otherwise suppress + // This is not technically correct but prevents the permadiff described in https://github.com/hashicorp/terraform-provider-google/issues/16400 + if (len(addr_netmask_old)) == 2 && (len(addr_netmask_new) == 2) { + netmask_equality = addr_netmask_old[1] == addr_netmask_new[1] + } else { + netmask_equality = true + } + + return addr_equality && netmask_equality +} + +// Suppress diffs for duration format. 
ex "60.0s" and "60s" same +// https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#duration +func DurationDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + oDuration, err := time.ParseDuration(old) + if err != nil { + return false + } + nDuration, err := time.ParseDuration(new) + if err != nil { + return false + } + return oDuration == nDuration +} + +// Use this method when the field accepts either an IP address or a +// self_link referencing a resource (such as google_compute_route's +// next_hop_ilb) +func CompareIpAddressOrSelfLinkOrResourceName(_, old, new string, _ *schema.ResourceData) bool { + // if we can parse `new` as an IP address, then compare as strings + if net.ParseIP(new) != nil { + return new == old + } + + // otherwise compare as self links + return CompareSelfLinkOrResourceName("", old, new, nil) +} + +{{ if ne $.TargetVersionName `ga` -}} +// Suppress all diffs, used for Disk.Interface which is a nonfunctional field +func AlwaysDiffSuppress(_, _, _ string, _ *schema.ResourceData) bool { + return true +} +{{- end }} + +// Use this method when subnet is optioanl and auto_create_subnetworks = true +// API sometimes choose a subnet so the diff needs to be ignored +func CompareOptionalSubnet(_, old, new string, _ *schema.ResourceData) bool { + if IsEmptyValue(reflect.ValueOf(new)) { + return true + } + // otherwise compare as self links + return CompareSelfLinkOrResourceName("", old, new, nil) +} + +// Suppress diffs in below cases +// "https://hello-rehvs75zla-uc.a.run.app/" -> "https://hello-rehvs75zla-uc.a.run.app" +// "https://hello-rehvs75zla-uc.a.run.app" -> "https://hello-rehvs75zla-uc.a.run.app/" +func LastSlashDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + if last := len(new) - 1; last >= 0 && new[last] == '/' { + new = new[:last] + } + + if last := len(old) - 1; last >= 0 && old[last] == '/' { + old = old[:last] + } + return new == old +} + +// Suppress diffs when the value read 
from api +// has the project number instead of the project name +func ProjectNumberDiffSuppress(_, old, new string, _ *schema.ResourceData) bool { + var a2, b2 string + reN := regexp.MustCompile("projects/\\d+") + re := regexp.MustCompile("projects/[^/]+") + replacement := []byte("projects/equal") + a2 = string(reN.ReplaceAll([]byte(old), replacement)) + b2 = string(re.ReplaceAll([]byte(new), replacement)) + return a2 == b2 +} + +func IsNewResource(diff TerraformResourceDiff) bool { + name := diff.Get("name") + return name.(string) == "" +} + +func CompareCryptoKeyVersions(_, old, new string, _ *schema.ResourceData) bool { + // The API can return cryptoKeyVersions even though it wasn't specified. + // format: projects//locations//keyRings//cryptoKeys//cryptoKeyVersions/1 + + kmsKeyWithoutVersions := strings.Split(old, "/cryptoKeyVersions")[0] + if kmsKeyWithoutVersions == new { + return true + } + + return false +} + +func CidrOrSizeDiffSuppress(k, old, new string, d *schema.ResourceData) bool { + // If the user specified a size and the API returned a full cidr block, suppress. 
+ return strings.HasPrefix(new, "/") && strings.HasSuffix(old, new) +} \ No newline at end of file diff --git a/mmv1/third_party/terraform/tpgresource/go/tpgtools_custom_flattens.go.tmpl b/mmv1/third_party/terraform/tpgresource/go/tpgtools_custom_flattens.go.tmpl new file mode 100644 index 000000000000..8c32d4eca8e3 --- /dev/null +++ b/mmv1/third_party/terraform/tpgresource/go/tpgtools_custom_flattens.go.tmpl @@ -0,0 +1,39 @@ +package tpgresource + +import ( + containeraws "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/containeraws{{ $.DCLVersion }}" + containerazure "github.com/GoogleCloudPlatform/declarative-resource-client-library/services/google/containerazure{{ $.DCLVersion }}" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport" +) + +func FlattenContainerAwsNodePoolManagement(obj *containeraws.NodePoolManagement, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if obj == nil { + return nil + } + transformed := make(map[string]interface{}) + + if obj.AutoRepair == nil || obj.Empty() { + transformed["auto_repair"] = false + } else { + transformed["auto_repair"] = obj.AutoRepair + } + + return []interface{}{transformed} +} + +func FlattenContainerAzureNodePoolManagement(obj *containerazure.NodePoolManagement, d *schema.ResourceData, config *transport_tpg.Config) interface{} { + if obj == nil { + return nil + } + transformed := make(map[string]interface{}) + + if obj.AutoRepair == nil || obj.Empty() { + transformed["auto_repair"] = false + } else { + transformed["auto_repair"] = obj.AutoRepair + } + + return []interface{}{transformed} +} diff --git a/mmv1/third_party/terraform/tpgresource/utils.go b/mmv1/third_party/terraform/tpgresource/utils.go index 4d16bc9a15eb..a86a88d98a0e 100644 --- a/mmv1/third_party/terraform/tpgresource/utils.go +++ b/mmv1/third_party/terraform/tpgresource/utils.go @@ -58,15 +58,14 @@ 
type TerraformResourceDiff interface { // Contains functions that don't really belong anywhere else. // GetRegionFromZone returns the region from a zone for Google cloud. -// This is by removing the last two chars from the zone name to leave the region -// If there aren't enough characters in the input string, an empty string is returned +// This is by removing the characters after the last '-'. // e.g. southamerica-west1-a => southamerica-west1 func GetRegionFromZone(zone string) string { - if zone != "" && len(zone) > 2 { - region := zone[:len(zone)-2] - return region + zoneParts := strings.Split(zone, "-") + if len(zoneParts) < 3 { + return "" } - return "" + return strings.Join(zoneParts[:len(zoneParts)-1], "-") } // Infers the region based on the following (in order of priority): diff --git a/mmv1/third_party/terraform/transport/go/provider_handwritten_endpoint.go.tmpl b/mmv1/third_party/terraform/transport/go/provider_handwritten_endpoint.go.tmpl new file mode 100644 index 000000000000..f42ba038e969 --- /dev/null +++ b/mmv1/third_party/terraform/transport/go/provider_handwritten_endpoint.go.tmpl @@ -0,0 +1,131 @@ +package transport + +import ( + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-provider-google/google/verify" +) + +// For generated resources, endpoint entries live in product-specific provider +// files. Collect handwritten ones here. If any of these are modified, be sure +// to update the provider_reference docs page. 
+ +var CloudBillingCustomEndpointEntryKey = "cloud_billing_custom_endpoint" +var CloudBillingCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var ComposerCustomEndpointEntryKey = "composer_custom_endpoint" +var ComposerCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var ContainerCustomEndpointEntryKey = "container_custom_endpoint" +var ContainerCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var DataflowCustomEndpointEntryKey = "dataflow_custom_endpoint" +var DataflowCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var IAMCustomEndpointEntryKey = "iam_custom_endpoint" +var IAMCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var IamCredentialsCustomEndpointEntryKey = "iam_credentials_custom_endpoint" +var IamCredentialsCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var ResourceManagerV3CustomEndpointEntryKey = "resource_manager_v3_custom_endpoint" +var ResourceManagerV3CustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +{{ if ne $.TargetVersionName `ga` -}} +var RuntimeConfigCustomEndpointEntryKey = "runtimeconfig_custom_endpoint" +var RuntimeConfigCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} +{{- end }} + +var ServiceNetworkingCustomEndpointEntryKey = "service_networking_custom_endpoint" +var ServiceNetworkingCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + 
+var ServiceUsageCustomEndpointEntryKey = "service_usage_custom_endpoint" +var ServiceUsageCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_SERVICE_USAGE_CUSTOM_ENDPOINT", + }, DefaultBasePaths[ServiceUsageBasePathKey]), +} + +var BigtableAdminCustomEndpointEntryKey = "bigtable_custom_endpoint" +var BigtableAdminCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_BIGTABLE_CUSTOM_ENDPOINT", + }, DefaultBasePaths[BigtableAdminBasePathKey]), +} + +var PrivatecaCertificateTemplateEndpointEntryKey = "privateca_custom_endpoint" +var PrivatecaCertificateTemplateCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_PRIVATECA_CUSTOM_ENDPOINT", + }, DefaultBasePaths[PrivatecaBasePathKey]), +} + +var ContainerAwsCustomEndpointEntryKey = "container_aws_custom_endpoint" +var ContainerAwsCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var ContainerAzureCustomEndpointEntryKey = "container_azure_custom_endpoint" +var ContainerAzureCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +var TagsLocationCustomEndpointEntryKey = "tags_location_custom_endpoint" +var TagsLocationCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: ValidateCustomEndpoint, +} + +func ValidateCustomEndpoint(v interface{}, k string) (ws []string, errors []error) { + re := `.*/[^/]+/$` + return verify.ValidateRegexp(re)(v, k) +} diff --git 
a/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_image.html.markdown b/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_image.html.markdown index 4065bd033515..62bed15ebcd0 100644 --- a/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_image.html.markdown +++ b/mmv1/third_party/terraform/website/docs/d/artifact_registry_docker_image.html.markdown @@ -21,9 +21,9 @@ resource "google_artifact_registry_repository" "my_repo" { } data "google_artifact_registry_docker_image" "my_image" { - repository = google_artifact_registry_repository.my_repo.id - image = "my-image" - tag = "my-tag" + location = google_artifact_registry_repository.my_repo.location + repository_id = google_artifact_registry_repository.my_repo.repository_id + image = "my-image:my-tag" } resource "google_cloud_run_v2_service" "default" { @@ -43,7 +43,7 @@ The following arguments are supported: * `location` - (Required) The location of the artifact registry. -* `repository_id` - (Required) The last part of the repository name. to fetch from. +* `repository_id` - (Required) The last part of the repository name to fetch from. * `image_name` - (Required) The image name to fetch. If no digest or tag is provided, then the latest modified image will be used. diff --git a/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown b/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown index 8ecb74a85217..346d72b6c7d8 100644 --- a/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/getting_started.html.markdown @@ -100,23 +100,23 @@ are used as a short way to identify resources, and a resource's display name in the Cloud Console will be the one defined in the `name` field. When linking resources in a Terraform config though, you'll primarily want to -use a different field, the `self_link` of a resource. 
Like `name`, nearly every -resource has a `self_link`. They look like: +use a different field, the `id` of a resource. Every Terraform resource has an +`id`. In the Google provider they generally look like: ``` -{{API base url}}/projects/{{your project}}/{{location type}}/{{location}}/{{resource type}}/{{name}} +projects/{{your project}}/{{location type}}/{{location}}/{{resource type}}/{{name}} ``` For example, the instance defined earlier in a project named `foo` will have -the `self_link`: +the `id`: ``` -https://www.googleapis.com/compute/v1/projects/foo/zones/us-central1-c/instances/terraform-instance +projects/foo/zones/us-central1-c/instances/terraform-instance ``` -A resource's `self_link` is a unique reference to that resource. When +A resource's `id` is a unique reference to that resource. When linking two resources in Terraform, you can use Terraform interpolation to -avoid typing out the self link! Let's use a `google_compute_network` to +avoid typing out the id! Let's use a `google_compute_network` to demonstrate. Add this block to your config: @@ -136,7 +136,7 @@ with a subnetwork in each region. Next, change the network of the network_interface { - # A default network is created for all GCP projects - network = "default" -+ network = google_compute_network.vpc_network.self_link ++ network = google_compute_network.vpc_network.id access_config { ``` @@ -211,7 +211,7 @@ resource "google_compute_instance" "vm_instance" { network_interface { # A default network is created for all GCP projects - network = google_compute_network.vpc_network.self_link + network = google_compute_network.vpc_network.id access_config { } } @@ -237,7 +237,7 @@ a virtual machine on Google Cloud Platform. 
The key concepts unique to GCP are: * and how to use a default `project` in your provider * What a resource being global, regional, or zonal means on GCP * and how to specify a default `region` and `zone` -* How GCP uses `name` and `self_link` to identify resources +* How GCP uses `name` and `id` to identify resources * How to add GCP service account credentials to Terraform Run `terraform destroy` to tear down your resources. diff --git a/mmv1/third_party/terraform/website/docs/guides/version_6_upgrade.html.markdown b/mmv1/third_party/terraform/website/docs/guides/version_6_upgrade.html.markdown index b0530361aeb0..a32f7fa8fb58 100644 --- a/mmv1/third_party/terraform/website/docs/guides/version_6_upgrade.html.markdown +++ b/mmv1/third_party/terraform/website/docs/guides/version_6_upgrade.html.markdown @@ -1,7 +1,7 @@ --- -page_title: "Terraform Google Provider 6.0.0 Upgrade Guide" +page_title: "Terraform provider for Google Cloud 6.0.0 Upgrade Guide" description: |- - Terraform Google Provider 6.0.0 Upgrade Guide + Terraform provider for Google Cloud 6.0.0 Upgrade Guide --- # Terraform Google Provider 6.0.0 Upgrade Guide diff --git a/mmv1/third_party/terraform/website/docs/r/bigtable_gc_policy.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigtable_gc_policy.html.markdown index 135345f0891a..b41494cd49a4 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigtable_gc_policy.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigtable_gc_policy.html.markdown @@ -166,6 +166,9 @@ The following arguments are supported: Possible values are: `ABANDON`. +* `ignore_warnings` - (Optional) Boolean for whether to allow ignoring warnings when updating the gc policy. + Setting this to `true` allows relaxing the gc policy for replicated clusters by up to 90 days, but keep in mind this may increase how long clusters are inconsistent. 
Make sure + you understand the risks listed at https://cloud.google.com/bigtable/docs/garbage-collection#increasing before setting this option. ----- `max_age` supports the following arguments: diff --git a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown index 330f9e76b800..31735da2cccb 100644 --- a/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/composer_environment.html.markdown @@ -47,6 +47,11 @@ will not be able to find or manage many of these underlying resources automatica resource "google_composer_environment" "test" { name = "example-composer-env" region = "us-central1" + config { + software_config { + image_version = "composer-1-airflow-2" + } + } } ``` @@ -415,14 +420,40 @@ The following arguments are supported: They cannot specify Apache Airflow software configuration overrides (they cannot match the regular expression `AIRFLOW__[A-Z0-9_]+__[A-Z0-9_]+`), and they cannot match any of the following reserved names: ``` + AIRFLOW_DATABASE_VERSION AIRFLOW_HOME - C_FORCE_ROOT + AIRFLOW_SRC_DIR + AIRFLOW_WEBSERVER + AUTO_GKE + CLOUDSDK_METRICS_ENVIRONMENT + CLOUD_LOGGING_ONLY + COMPOSER_ENVIRONMENT + COMPOSER_GKE_LOCATION + COMPOSER_GKE_NAME + COMPOSER_GKE_ZONE + COMPOSER_LOCATION + COMPOSER_OPERATION_UUID + COMPOSER_PYTHON_VERSION + COMPOSER_VERSION CONTAINER_NAME + C_FORCE_ROOT DAGS_FOLDER GCP_PROJECT + GCP_TENANT_PROJECT + GCSFUSE_EXTRACTED GCS_BUCKET GKE_CLUSTER_NAME + GKE_IN_TENANT + GOOGLE_APPLICATION_CREDENTIALS + MAJOR_VERSION + MINOR_VERSION + PATH + PIP_DISABLE_PIP_VERSION_CHECK + PORT + PROJECT_ID + PYTHONPYCACHEPREFIX SQL_DATABASE + SQL_HOST SQL_INSTANCE SQL_PASSWORD SQL_PROJECT diff --git a/mmv1/third_party/tgc/logging_billing_account_bucket_config.go b/mmv1/third_party/tgc/logging_billing_account_bucket_config.go new file mode 100644 index 000000000000..741e11a59bb5 
--- /dev/null +++ b/mmv1/third_party/tgc/logging_billing_account_bucket_config.go @@ -0,0 +1,175 @@ +package google + +import ( + "reflect" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v5/tfplan2cai/converters/google/resources/cai" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +const logBillingAccountBucketAssetType string = "logging.googleapis.com/LogBucket" + +func resourceConverterLogBillingAccountBucket() cai.ResourceConverter { + return cai.ResourceConverter{ + AssetType: logBillingAccountBucketAssetType, + Convert: GetLogBillingAccountBucketCaiObject, + } +} + +func GetLogBillingAccountBucketCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]cai.Asset, error) { + name, err := cai.AssetName(d, config, "//logging.googleapis.com/projects/{{project}}/locations/{{location}}/buckets/{{bucket_id}}") + if err != nil { + return []cai.Asset{}, err + } + if obj, err := GetLogBillingAccountBucketApiObject(d, config); err == nil { + return []cai.Asset{{ + Name: name, + Type: logBillingAccountBucketAssetType, + Resource: &cai.AssetResource{ + Version: "v2", + DiscoveryDocumentURI: "https://logging.googleapis.com/$discovery/rest?version=v2", + DiscoveryName: "LogBucket", + Data: obj, + }, + }}, nil + } else { + return []cai.Asset{}, err + } +} + +func GetLogBillingAccountBucketApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + obj := make(map[string]interface{}) + + billingAccountProp, err := expandLogBillingAccountBucketBillingAccountId(d.Get("billing_account"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("billing_account"); !tpgresource.IsEmptyValue(reflect.ValueOf(billingAccountProp)) && (ok || !reflect.DeepEqual(v, 
billingAccountProp)) { + obj["id"] = billingAccountProp + } + + nameProp, err := expandLogBillingAccountBucketName(d.Get("name"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp + } + + bucketIdProp, err := expandLogBillingAccountBucketBucketId(d.Get("bucket_id"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("bucket_id"); !tpgresource.IsEmptyValue(reflect.ValueOf(bucketIdProp)) && (ok || !reflect.DeepEqual(v, bucketIdProp)) { + obj["bucketId"] = bucketIdProp + } + + locationProp, err := expandLogBillingAccountBucketLocation(d.Get("location"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("location"); !tpgresource.IsEmptyValue(reflect.ValueOf(locationProp)) && (ok || !reflect.DeepEqual(v, locationProp)) { + obj["location"] = locationProp + } + + descriptionProp, err := expandLogBillingAccountBucketDescription(d.Get("description"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("description"); !tpgresource.IsEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + + retentionDaysProp, err := expandLogBillingAccountBucketRetentionDays(d.Get("retention_days"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("retention_days"); !tpgresource.IsEmptyValue(reflect.ValueOf(retentionDaysProp)) && (ok || !reflect.DeepEqual(v, retentionDaysProp)) { + obj["retentionDays"] = retentionDaysProp + } + + indexConfigsProp, err := expandLogBillingAccountBucketIndexConfigs(d.Get("index_configs"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("index_configs"); !tpgresource.IsEmptyValue(reflect.ValueOf(indexConfigsProp)) && (ok || !reflect.DeepEqual(v, 
indexConfigsProp)) { + obj["indexConfigs"] = indexConfigsProp + } + + lifecycleStateProp, err := expandLogBillingAccountBucketLifecycleState(d.Get("lifecycle_state"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("lifecycle_state"); !tpgresource.IsEmptyValue(reflect.ValueOf(lifecycleStateProp)) && (ok || !reflect.DeepEqual(v, lifecycleStateProp)) { + obj["lifecycleState"] = lifecycleStateProp + } + + return obj, nil +} + +func expandLogBillingAccountBucketBillingAccountId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/buckets/{{bucket_id}}") + if err != nil { + return nil, err + } + + return v, nil +} + +func expandLogBillingAccountBucketName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketLifecycleState(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketIndexConfigs(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v = v.(*schema.Set).List() + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedFieldPath, err := expandLogBillingAccountBucketFieldPath(original["field_path"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFieldPath); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["fieldPath"] = transformedFieldPath + } + + transformedType, err := expandLogBillingAccountBucketType(original["type"], d, config) + if err != nil { + return nil, err + } else if val := 
reflect.ValueOf(transformedType); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["type"] = transformedType + } + + req = append(req, transformed) + } + + return req, nil +} + +func expandLogBillingAccountBucketType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketFieldPath(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketRetentionDays(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketDescription(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketLocation(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogBillingAccountBucketBucketId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} diff --git a/mmv1/third_party/tgc/logging_project_bucket_config.go b/mmv1/third_party/tgc/logging_project_bucket_config.go new file mode 100644 index 000000000000..77e96884a679 --- /dev/null +++ b/mmv1/third_party/tgc/logging_project_bucket_config.go @@ -0,0 +1,175 @@ +package google + +import ( + "reflect" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + + "github.com/GoogleCloudPlatform/terraform-google-conversion/v5/tfplan2cai/converters/google/resources/cai" + "github.com/hashicorp/terraform-provider-google-beta/google-beta/tpgresource" + transport_tpg "github.com/hashicorp/terraform-provider-google-beta/google-beta/transport" +) + +const logProjectBucketAssetType string = "logging.googleapis.com/LogBucket" + +func resourceConverterLogProjectBucket() 
cai.ResourceConverter { + return cai.ResourceConverter{ + AssetType: logProjectBucketAssetType, + Convert: GetLogProjectBucketCaiObject, + } +} + +func GetLogProjectBucketCaiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) ([]cai.Asset, error) { + name, err := cai.AssetName(d, config, "//logging.googleapis.com/projects/{{project}}/locations/{{location}}/buckets/{{bucket_id}}") + if err != nil { + return []cai.Asset{}, err + } + if obj, err := GetLogProjectBucketApiObject(d, config); err == nil { + return []cai.Asset{{ + Name: name, + Type: logProjectBucketAssetType, + Resource: &cai.AssetResource{ + Version: "v2", + DiscoveryDocumentURI: "https://logging.googleapis.com/$discovery/rest?version=v2", + DiscoveryName: "LogBucket", + Data: obj, + }, + }}, nil + } else { + return []cai.Asset{}, err + } +} + +func GetLogProjectBucketApiObject(d tpgresource.TerraformResourceData, config *transport_tpg.Config) (map[string]interface{}, error) { + obj := make(map[string]interface{}) + + organizationProp, err := expandLogProjectBucketProjectId(d.Get("project"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("project"); !tpgresource.IsEmptyValue(reflect.ValueOf(organizationProp)) && (ok || !reflect.DeepEqual(v, organizationProp)) { + obj["id"] = organizationProp + } + + nameProp, err := expandLogProjectBucketName(d.Get("name"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("name"); !tpgresource.IsEmptyValue(reflect.ValueOf(nameProp)) && (ok || !reflect.DeepEqual(v, nameProp)) { + obj["name"] = nameProp + } + + bucketIdProp, err := expandLogProjectBucketBucketId(d.Get("bucket_id"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("bucket_id"); !tpgresource.IsEmptyValue(reflect.ValueOf(bucketIdProp)) && (ok || !reflect.DeepEqual(v, bucketIdProp)) { + obj["bucketId"] = bucketIdProp + } + + locationProp, err := 
expandLogProjectBucketLocation(d.Get("location"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("location"); !tpgresource.IsEmptyValue(reflect.ValueOf(locationProp)) && (ok || !reflect.DeepEqual(v, locationProp)) { + obj["location"] = locationProp + } + + descriptionProp, err := expandLogProjectBucketDescription(d.Get("description"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("description"); !tpgresource.IsEmptyValue(reflect.ValueOf(descriptionProp)) && (ok || !reflect.DeepEqual(v, descriptionProp)) { + obj["description"] = descriptionProp + } + + retentionDaysProp, err := expandLogProjectBucketRetentionDays(d.Get("retention_days"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("retention_days"); !tpgresource.IsEmptyValue(reflect.ValueOf(retentionDaysProp)) && (ok || !reflect.DeepEqual(v, retentionDaysProp)) { + obj["retentionDays"] = retentionDaysProp + } + + indexConfigsProp, err := expandLogProjectBucketIndexConfigs(d.Get("index_configs"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("index_configs"); !tpgresource.IsEmptyValue(reflect.ValueOf(indexConfigsProp)) && (ok || !reflect.DeepEqual(v, indexConfigsProp)) { + obj["indexConfigs"] = indexConfigsProp + } + + lifecycleStateProp, err := expandLogProjectBucketLifecycleState(d.Get("lifecycle_state"), d, config) + if err != nil { + return nil, err + } else if v, ok := d.GetOkExists("lifecycle_state"); !tpgresource.IsEmptyValue(reflect.ValueOf(lifecycleStateProp)) && (ok || !reflect.DeepEqual(v, lifecycleStateProp)) { + obj["lifecycleState"] = lifecycleStateProp + } + + return obj, nil +} + +func expandLogProjectBucketProjectId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v, err := tpgresource.ReplaceVars(d, config, "projects/{{project}}/locations/{{location}}/buckets/{{bucket_id}}") + if err != nil { + 
return nil, err + } + + return v, nil +} + +func expandLogProjectBucketName(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketLifecycleState(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketIndexConfigs(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + v = v.(*schema.Set).List() + l := v.([]interface{}) + req := make([]interface{}, 0, len(l)) + for _, raw := range l { + if raw == nil { + continue + } + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedFieldPath, err := expandLogProjectBucketFieldPath(original["field_path"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedFieldPath); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["fieldPath"] = transformedFieldPath + } + + transformedType, err := expandLogProjectBucketType(original["type"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedType); val.IsValid() && !tpgresource.IsEmptyValue(val) { + transformed["type"] = transformedType + } + + req = append(req, transformed) + } + + return req, nil +} + +func expandLogProjectBucketType(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketFieldPath(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketRetentionDays(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketDescription(v interface{}, d tpgresource.TerraformResourceData, config 
*transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketLocation(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} + +func expandLogProjectBucketBucketId(v interface{}, d tpgresource.TerraformResourceData, config *transport_tpg.Config) (interface{}, error) { + return v, nil +} diff --git a/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.json b/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.json new file mode 100644 index 000000000000..cd993e2be994 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.json @@ -0,0 +1,20 @@ +[ + { + "name": "//logging.googleapis.com/projects/{{.Provider.project}}/locations/global/buckets/_Default", + "asset_type": "logging.googleapis.com/LogBucket", + "resource": { + "version": "v2", + "discovery_document_uri": "https://logging.googleapis.com/$discovery/rest?version=v2", + "discovery_name": "LogBucket", + "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", + "data": { + "bucketId": "_Default", + "id": "projects/{{.Provider.project}}/locations/global/buckets/_Default", + "location": "global", + "retentionDays": 30 + } + }, + "ancestors": ["organizations/{{.OrgID}}"], + "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}" + } +] \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.tf b/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.tf new file mode 100644 index 000000000000..24e45fcd3ec5 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_google_logging_billing_account_bucket_config.tf @@ -0,0 +1,19 @@ +terraform { + required_providers { + google = { + source = "hashicorp/google-beta" + version = "~> {{.Provider.version}}" + } + } +} + +provider 
"google" { + {{if .Provider.credentials }}credentials = "{{.Provider.credentials}}"{{end}} +} + +resource "google_logging_billing_account_bucket_config" "basic" { + billing_account = "{{.Project.BillingAccountName}}" + location = "global" + retention_days = 30 + bucket_id = "_Default" +} \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.json b/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.json new file mode 100644 index 000000000000..cd993e2be994 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.json @@ -0,0 +1,20 @@ +[ + { + "name": "//logging.googleapis.com/projects/{{.Provider.project}}/locations/global/buckets/_Default", + "asset_type": "logging.googleapis.com/LogBucket", + "resource": { + "version": "v2", + "discovery_document_uri": "https://logging.googleapis.com/$discovery/rest?version=v2", + "discovery_name": "LogBucket", + "parent": "//cloudresourcemanager.googleapis.com/projects/{{.Provider.project}}", + "data": { + "bucketId": "_Default", + "id": "projects/{{.Provider.project}}/locations/global/buckets/_Default", + "location": "global", + "retentionDays": 30 + } + }, + "ancestors": ["organizations/{{.OrgID}}"], + "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}" + } +] \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.tf b/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.tf new file mode 100644 index 000000000000..06f3667fcba1 --- /dev/null +++ b/mmv1/third_party/tgc/tests/data/example_google_logging_project_bucket_config.tf @@ -0,0 +1,19 @@ +terraform { + required_providers { + google = { + source = "hashicorp/google-beta" + version = "~> {{.Provider.version}}" + } + } +} + +provider "google" { + {{if .Provider.credentials }}credentials = "{{.Provider.credentials}}"{{end}} +} + +resource 
"google_logging_project_bucket_config" "basic" { + project = "{{.Provider.project}}" + location = "global" + retention_days = 30 + bucket_id = "_Default" +} \ No newline at end of file diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.json b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.json index f3525a545514..38d12cb5204f 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.json @@ -1,6 +1,6 @@ [ { - "name": "//pubsublite.googleapis.com/projects/{{.Provider.project}}/locations/us-central1a/subscriptions/example-subscription", + "name": "//pubsublite.googleapis.com/projects/{{.Provider.project}}/locations/us-central1-a/subscriptions/example-subscription", "asset_type": "pubsublite.googleapis.com/Subscription", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -12,7 +12,7 @@ "deliveryConfig": { "deliveryRequirement": "DELIVER_AFTER_STORED" }, - "topic": "projects/{{.Provider.project}}/locations/us-central1a/topics/my-topic" + "topic": "projects/{{.Provider.project}}/locations/us-central1-a/topics/my-topic" } } } diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.tf b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.tf index 8a99f4e498be..0936d1eaab69 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.tf +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_subscription.tf @@ -30,7 +30,7 @@ provider "google" { resource "google_pubsub_lite_subscription" "example" { name = "example-subscription" topic = "my-topic" - zone = "us-central1a" + zone = "us-central1-a" delivery_config { delivery_requirement = "DELIVER_AFTER_STORED" } diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.json b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.json index 0edf91c77a40..89dd6976827a 100644 --- 
a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.json +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.json @@ -1,6 +1,6 @@ [ { - "name": "//pubsublite.googleapis.com/projects/{{.Provider.project}}/locations/us-central1a/topics/example-topic", + "name": "//pubsublite.googleapis.com/projects/{{.Provider.project}}/locations/us-central1-a/topics/example-topic", "asset_type": "pubsublite.googleapis.com/Topic", "ancestry_path": "{{.Ancestry}}/project/{{.Provider.project}}", "resource": { @@ -17,7 +17,7 @@ "count": 1 }, "reservationConfig": { - "throughputReservation": "projects/{{.Provider.project}}/locations/us-central/reservations/example-reservation" + "throughputReservation": "projects/{{.Provider.project}}/locations/us-central1/reservations/example-reservation" }, "retentionConfig": { "perPartitionBytes": "32212254720" diff --git a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.tf b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.tf index 6697b15713e8..47607a535ce8 100644 --- a/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.tf +++ b/mmv1/third_party/tgc/tests/data/example_pubsub_lite_topic.tf @@ -29,7 +29,7 @@ provider "google" { resource "google_pubsub_lite_topic" "example" { name = "example-topic" - zone = "us-central1a" + zone = "us-central1-a" partition_config { count = 1 diff --git a/tpgtools/go.mod b/tpgtools/go.mod index 67f7b6571bd9..5e9a41311a90 100644 --- a/tpgtools/go.mod +++ b/tpgtools/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( bitbucket.org/creachadair/stringset v0.0.11 - github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0 + github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0 github.com/golang/glog v1.1.2 github.com/hashicorp/hcl v1.0.0 github.com/kylelemons/godebug v1.1.0 diff --git a/tpgtools/go.sum b/tpgtools/go.sum index 0e90e5744d3a..9a48751b2af4 100644 --- a/tpgtools/go.sum +++ b/tpgtools/go.sum @@ -6,8 +6,8 @@ cloud.google.com/go/compute 
v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdi cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0 h1:FBKsgWIOEdtpx2YuF+aBH33K0Ih25D3xuKyp9peH4jc= -github.com/GoogleCloudPlatform/declarative-resource-client-library v1.67.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0 h1:LIPIYi4hy7ttUSrziY/TYwMDuEvvV593n80kRmz6nZ4= +github.com/GoogleCloudPlatform/declarative-resource-client-library v1.68.0/go.mod h1:pL2Qt5HT+x6xrTd806oMiM3awW6kNIXB/iiuClz6m6k= github.com/cenkalti/backoff v2.2.1+incompatible h1:tNowT99t7UNflLxfYYSlKYsBpXdEet03Pg2g16Swow4= github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=