From c7d6eaef25794d3ebfbbd4d8d34f0bc548d996e1 Mon Sep 17 00:00:00 2001 From: Modular Magician Date: Wed, 17 Jul 2024 20:22:44 +0000 Subject: [PATCH] Merge pull request #11179 from c2thorn/sync-main-FEATURE-BRANCH-6.0.0 Sync main feature branch 6.0.0 - 7/16 [upstream:4b86cb21f180699aa01ab9cea21812bd58c46815] Signed-off-by: Modular Magician --- .../backing_file.tf | 15 +++ bigquery_dataset_resource_tags/main.tf | 34 ++++++ bigquery_dataset_resource_tags/motd | 7 ++ bigquery_dataset_resource_tags/tutorial.md | 79 ++++++++++++++ compute_mig_resize_request/backing_file.tf | 15 +++ compute_mig_resize_request/main.tf | 69 ++++++++++++ compute_mig_resize_request/motd | 7 ++ compute_mig_resize_request/tutorial.md | 79 ++++++++++++++ datastream_stream_bigquery/main.tf | 1 + .../backing_file.tf | 15 +++ .../main.tf | 102 ++++++++++++++++++ datastream_stream_bigquery_append_only/motd | 7 ++ .../tutorial.md | 79 ++++++++++++++ .../backing_file.tf | 15 +++ network_connectivity_hub_basic/main.tf | 7 ++ network_connectivity_hub_basic/motd | 7 ++ network_connectivity_hub_basic/tutorial.md | 79 ++++++++++++++ 17 files changed, 617 insertions(+) create mode 100644 bigquery_dataset_resource_tags/backing_file.tf create mode 100644 bigquery_dataset_resource_tags/main.tf create mode 100644 bigquery_dataset_resource_tags/motd create mode 100644 bigquery_dataset_resource_tags/tutorial.md create mode 100644 compute_mig_resize_request/backing_file.tf create mode 100644 compute_mig_resize_request/main.tf create mode 100644 compute_mig_resize_request/motd create mode 100644 compute_mig_resize_request/tutorial.md create mode 100644 datastream_stream_bigquery_append_only/backing_file.tf create mode 100644 datastream_stream_bigquery_append_only/main.tf create mode 100644 datastream_stream_bigquery_append_only/motd create mode 100644 datastream_stream_bigquery_append_only/tutorial.md create mode 100644 network_connectivity_hub_basic/backing_file.tf create mode 100644 network_connectivity_hub_basic/main.tf create mode 100644 network_connectivity_hub_basic/motd create mode 100644 network_connectivity_hub_basic/tutorial.md diff --git a/bigquery_dataset_resource_tags/backing_file.tf b/bigquery_dataset_resource_tags/backing_file.tf new file mode 100644 index 00000000..c60b1199 --- /dev/null +++ b/bigquery_dataset_resource_tags/backing_file.tf @@ -0,0 +1,15 @@ +# This file has some scaffolding to make sure that names are unique and that +# a region and zone are selected when you try to create your Terraform resources. 
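+#
+# random_pet below generates a short two-word id (for example "tight-mallard"),
+# and main.tf appends local.name_suffix to resource names, so the names are
+# unlikely to collide with resources created by other runs of the same example.
+# The provider block pins a default region and zone (us-central1 / us-central1-c);
+# resources that set their own location override these defaults.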
+ +locals { + name_suffix = "${random_pet.suffix.id}" +} + +resource "random_pet" "suffix" { + length = 2 +} + +provider "google" { + region = "us-central1" + zone = "us-central1-c" +} diff --git a/bigquery_dataset_resource_tags/main.tf b/bigquery_dataset_resource_tags/main.tf new file mode 100644 index 00000000..bafe1c29 --- /dev/null +++ b/bigquery_dataset_resource_tags/main.tf @@ -0,0 +1,34 @@ +data "google_project" "project" { +} + +resource "google_tags_tag_key" "tag_key1" { + parent = "projects/${data.google_project.project.number}" + short_name = "tag_key1-${local.name_suffix}" +} + +resource "google_tags_tag_value" "tag_value1" { + parent = "tagKeys/${google_tags_tag_key.tag_key1.name}" + short_name = "tag_value1-${local.name_suffix}" +} + +resource "google_tags_tag_key" "tag_key2" { + parent = "projects/${data.google_project.project.number}" + short_name = "tag_key2-${local.name_suffix}" +} + +resource "google_tags_tag_value" "tag_value2" { + parent = "tagKeys/${google_tags_tag_key.tag_key2.name}" + short_name = "tag_value2-${local.name_suffix}" +} + +resource "google_bigquery_dataset" "dataset" { + dataset_id = "dataset-${local.name_suffix}" + friendly_name = "test" + description = "This is a test description" + location = "EU" + + resource_tags = { + "${data.google_project.project.project_id}/${google_tags_tag_key.tag_key1.short_name}" = "${google_tags_tag_value.tag_value1.short_name}" + "${data.google_project.project.project_id}/${google_tags_tag_key.tag_key2.short_name}" = "${google_tags_tag_value.tag_value2.short_name}" + } +} diff --git a/bigquery_dataset_resource_tags/motd b/bigquery_dataset_resource_tags/motd new file mode 100644 index 00000000..45a906e8 --- /dev/null +++ b/bigquery_dataset_resource_tags/motd @@ -0,0 +1,7 @@ +=== + +These examples use real resources that will be billed to the +Google Cloud Platform project you use - so make sure that you +run "terraform destroy" before quitting! + +=== diff --git a/bigquery_dataset_resource_tags/tutorial.md b/bigquery_dataset_resource_tags/tutorial.md new file mode 100644 index 00000000..6d7a5f8e --- /dev/null +++ b/bigquery_dataset_resource_tags/tutorial.md @@ -0,0 +1,79 @@ +# Bigquery Dataset Resource Tags - Terraform + +## Setup + + + +Welcome to Terraform in Google Cloud Shell! We need you to let us know what project you'd like to use with Terraform. + + + +Terraform provisions real GCP resources, so anything you create in this session will be billed against this project. + +## Terraforming! + +Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command +to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up +the project name from the environment variable. + +```bash +export GOOGLE_CLOUD_PROJECT={{project-id}} +``` + +After that, let's get Terraform started. Run the following to pull in the providers. + +```bash +terraform init +``` + +With the providers downloaded and a project set, you're ready to use Terraform. Go ahead! + +```bash +terraform apply +``` + +Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan. + +```bash +yes +``` + + +## Post-Apply + +### Editing your config + +Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed. + +```bash +terraform plan +``` + +So let's make a change! Try editing a number, or appending a value to the name in the editor. Then, +run a 'plan' again. 
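+
+One easy target in this example is the dataset's `friendly_name` in main.tf; a sketch of
+such an edit (any similarly small change works just as well):
+
+```hcl
+  # inside resource "google_bigquery_dataset" "dataset" in main.tf
+  friendly_name = "test-edited" # was "test"
+```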
+ +```bash +terraform plan +``` + +Afterwards you can run an apply, which implicitly does a plan and shows you the intended changes +at the 'yes' prompt. + +```bash +terraform apply +``` + +```bash +yes +``` + +## Cleanup + +Run the following to remove the resources Terraform provisioned: + +```bash +terraform destroy +``` +```bash +yes +``` diff --git a/compute_mig_resize_request/backing_file.tf b/compute_mig_resize_request/backing_file.tf new file mode 100644 index 00000000..c60b1199 --- /dev/null +++ b/compute_mig_resize_request/backing_file.tf @@ -0,0 +1,15 @@ +# This file has some scaffolding to make sure that names are unique and that +# a region and zone are selected when you try to create your Terraform resources. + +locals { + name_suffix = "${random_pet.suffix.id}" +} + +resource "random_pet" "suffix" { + length = 2 +} + +provider "google" { + region = "us-central1" + zone = "us-central1-c" +} diff --git a/compute_mig_resize_request/main.tf b/compute_mig_resize_request/main.tf new file mode 100644 index 00000000..c60d5713 --- /dev/null +++ b/compute_mig_resize_request/main.tf @@ -0,0 +1,69 @@ +resource "google_compute_region_instance_template" "a3_dws" { + name = "a3-dws" + region = "us-central1" + description = "This template is used to create a mig instance that is compatible with DWS resize requests." + instance_description = "A3 GPU" + machine_type = "a3-highgpu-8g" + can_ip_forward = false + + scheduling { + automatic_restart = false + on_host_maintenance = "TERMINATE" + } + + disk { + source_image = "cos-cloud/cos-105-lts" + auto_delete = true + boot = true + disk_type = "pd-ssd" + disk_size_gb = "960" + mode = "READ_WRITE" + } + + guest_accelerator { + type = "nvidia-h100-80gb" + count = 8 + } + + reservation_affinity { + type = "NO_RESERVATION" + } + + shielded_instance_config { + enable_vtpm = true + enable_integrity_monitoring = true + } + + network_interface { + network = "default" + } +} + +resource "google_compute_instance_group_manager" "a3_dws" { + name = "a3-dws" + base_instance_name = "a3-dws" + zone = "us-central1-a" + + version { + instance_template = google_compute_region_instance_template.a3_dws.self_link + } + + instance_lifecycle_policy { + default_action_on_failure = "DO_NOTHING" + } + + wait_for_instances = false + +} + +resource "google_compute_resize_request" "a3_resize_request" { + name = "a3-dws-${local.name_suffix}" + instance_group_manager = google_compute_instance_group_manager.a3_dws.name + zone = "us-central1-a" + description = "Test resize request resource" + resize_by = 2 + requested_run_duration { + seconds = 14400 + nanos = 0 + } +} diff --git a/compute_mig_resize_request/motd b/compute_mig_resize_request/motd new file mode 100644 index 00000000..45a906e8 --- /dev/null +++ b/compute_mig_resize_request/motd @@ -0,0 +1,7 @@ +=== + +These examples use real resources that will be billed to the +Google Cloud Platform project you use - so make sure that you +run "terraform destroy" before quitting! + +=== diff --git a/compute_mig_resize_request/tutorial.md b/compute_mig_resize_request/tutorial.md new file mode 100644 index 00000000..5e98fd22 --- /dev/null +++ b/compute_mig_resize_request/tutorial.md @@ -0,0 +1,79 @@ +# Compute Mig Resize Request - Terraform + +## Setup + + + +Welcome to Terraform in Google Cloud Shell! We need you to let us know what project you'd like to use with Terraform. + + + +Terraform provisions real GCP resources, so anything you create in this session will be billed against this project. + +## Terraforming! 
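+
+This example requests GPU capacity through a managed instance group (MIG) resize request:
+`terraform apply` creates an `a3-highgpu-8g` regional instance template, a MIG that uses it,
+and a `google_compute_resize_request` asking for two extra instances with a four-hour run
+window (the DWS-style request mentioned in the template description). The key block from
+main.tf, shown here only for orientation:
+
+```hcl
+resource "google_compute_resize_request" "a3_resize_request" {
+  # name, zone and description omitted here; see main.tf for the full resource
+  instance_group_manager = google_compute_instance_group_manager.a3_dws.name
+  resize_by              = 2
+  requested_run_duration {
+    seconds = 14400 # 4 hours
+    nanos   = 0
+  }
+}
+```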
+ +Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command +to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up +the project name from the environment variable. + +```bash +export GOOGLE_CLOUD_PROJECT={{project-id}} +``` + +After that, let's get Terraform started. Run the following to pull in the providers. + +```bash +terraform init +``` + +With the providers downloaded and a project set, you're ready to use Terraform. Go ahead! + +```bash +terraform apply +``` + +Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan. + +```bash +yes +``` + + +## Post-Apply + +### Editing your config + +Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed. + +```bash +terraform plan +``` + +So let's make a change! Try editing a number, or appending a value to the name in the editor. Then, +run a 'plan' again. + +```bash +terraform plan +``` + +Afterwards you can run an apply, which implicitly does a plan and shows you the intended changes +at the 'yes' prompt. + +```bash +terraform apply +``` + +```bash +yes +``` + +## Cleanup + +Run the following to remove the resources Terraform provisioned: + +```bash +terraform destroy +``` +```bash +yes +``` diff --git a/datastream_stream_bigquery/main.tf b/datastream_stream_bigquery/main.tf index 3bacddf2..f7cfd78f 100644 --- a/datastream_stream_bigquery/main.tf +++ b/datastream_stream_bigquery/main.tf @@ -106,6 +106,7 @@ resource "google_datastream_stream" "default" { kms_key_name = "bigquery-kms-name-${local.name_suffix}" } } + merge {} } } diff --git a/datastream_stream_bigquery_append_only/backing_file.tf b/datastream_stream_bigquery_append_only/backing_file.tf new file mode 100644 index 00000000..c60b1199 --- /dev/null +++ b/datastream_stream_bigquery_append_only/backing_file.tf @@ -0,0 +1,15 @@ +# This file has some scaffolding to make sure that names are unique and that +# a region and zone are selected when you try to create your Terraform resources. + +locals { + name_suffix = "${random_pet.suffix.id}" +} + +resource "random_pet" "suffix" { + length = 2 +} + +provider "google" { + region = "us-central1" + zone = "us-central1-c" +} diff --git a/datastream_stream_bigquery_append_only/main.tf b/datastream_stream_bigquery_append_only/main.tf new file mode 100644 index 00000000..c2577fa9 --- /dev/null +++ b/datastream_stream_bigquery_append_only/main.tf @@ -0,0 +1,102 @@ +data "google_project" "project" { +} + +resource "google_sql_database_instance" "instance" { + name = "my-instance-${local.name_suffix}" + database_version = "MYSQL_8_0" + region = "us-central1" + settings { + tier = "db-f1-micro" + backup_configuration { + enabled = true + binary_log_enabled = true + } + + ip_configuration { + + // Datastream IPs will vary by region. 
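+      // These authorized_networks entries allowlist the Datastream public IPs
+      // this example uses for us-central1, so the source connection profile
+      // below can reach the instance over its public IP address. If you deploy
+      // in a different region, replace them with the Datastream IP list
+      // published for that region.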
+ authorized_networks { + value = "34.71.242.81" + } + + authorized_networks { + value = "34.72.28.29" + } + + authorized_networks { + value = "34.67.6.157" + } + + authorized_networks { + value = "34.67.234.134" + } + + authorized_networks { + value = "34.72.239.218" + } + } + } + + deletion_protection = false +} + +resource "google_sql_database" "db" { + instance = google_sql_database_instance.instance.name + name = "db" +} + +resource "random_password" "pwd" { + length = 16 + special = false +} + +resource "google_sql_user" "user" { + name = "user" + instance = google_sql_database_instance.instance.name + host = "%" + password = random_password.pwd.result +} + +resource "google_datastream_connection_profile" "source_connection_profile" { + display_name = "Source connection profile" + location = "us-central1" + connection_profile_id = "source-profile-${local.name_suffix}" + + mysql_profile { + hostname = google_sql_database_instance.instance.public_ip_address + username = google_sql_user.user.name + password = google_sql_user.user.password + } +} + +resource "google_datastream_connection_profile" "destination_connection_profile" { + display_name = "Connection profile" + location = "us-central1" + connection_profile_id = "destination-profile-${local.name_suffix}" + + bigquery_profile {} +} + +resource "google_datastream_stream" "default" { + stream_id = "my-stream-${local.name_suffix}" + location = "us-central1" + display_name = "my stream" + source_config { + source_connection_profile = google_datastream_connection_profile.source_connection_profile.id + mysql_source_config {} + } + destination_config { + destination_connection_profile = google_datastream_connection_profile.destination_connection_profile.id + bigquery_destination_config { + source_hierarchy_datasets { + dataset_template { + location = "us-central1" + } + } + append_only {} + } + } + + backfill_none { + } +} diff --git a/datastream_stream_bigquery_append_only/motd b/datastream_stream_bigquery_append_only/motd new file mode 100644 index 00000000..45a906e8 --- /dev/null +++ b/datastream_stream_bigquery_append_only/motd @@ -0,0 +1,7 @@ +=== + +These examples use real resources that will be billed to the +Google Cloud Platform project you use - so make sure that you +run "terraform destroy" before quitting! + +=== diff --git a/datastream_stream_bigquery_append_only/tutorial.md b/datastream_stream_bigquery_append_only/tutorial.md new file mode 100644 index 00000000..87528c7f --- /dev/null +++ b/datastream_stream_bigquery_append_only/tutorial.md @@ -0,0 +1,79 @@ +# Datastream Stream Bigquery Append Only - Terraform + +## Setup + + + +Welcome to Terraform in Google Cloud Shell! We need you to let us know what project you'd like to use with Terraform. + + + +Terraform provisions real GCP resources, so anything you create in this session will be billed against this project. + +## Terraforming! + +Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command +to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up +the project name from the environment variable. + +```bash +export GOOGLE_CLOUD_PROJECT={{project-id}} +``` + +After that, let's get Terraform started. Run the following to pull in the providers. + +```bash +terraform init +``` + +With the providers downloaded and a project set, you're ready to use Terraform. Go ahead! + +```bash +terraform apply +``` + +Terraform will show you what it plans to do, and prompt you to accept. 
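+For this example the plan is fairly large: a Cloud SQL MySQL instance, a database and user,
+two Datastream connection profiles, and the append-only stream itself, so the apply can take
+several minutes (the Cloud SQL instance is the slow part). The `append_only {}` block in the
+stream's BigQuery destination config means change events are written as new rows rather than
+merged into the destination tables.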
Type "yes" to accept the plan. + +```bash +yes +``` + + +## Post-Apply + +### Editing your config + +Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed. + +```bash +terraform plan +``` + +So let's make a change! Try editing a number, or appending a value to the name in the editor. Then, +run a 'plan' again. + +```bash +terraform plan +``` + +Afterwards you can run an apply, which implicitly does a plan and shows you the intended changes +at the 'yes' prompt. + +```bash +terraform apply +``` + +```bash +yes +``` + +## Cleanup + +Run the following to remove the resources Terraform provisioned: + +```bash +terraform destroy +``` +```bash +yes +``` diff --git a/network_connectivity_hub_basic/backing_file.tf b/network_connectivity_hub_basic/backing_file.tf new file mode 100644 index 00000000..c60b1199 --- /dev/null +++ b/network_connectivity_hub_basic/backing_file.tf @@ -0,0 +1,15 @@ +# This file has some scaffolding to make sure that names are unique and that +# a region and zone are selected when you try to create your Terraform resources. + +locals { + name_suffix = "${random_pet.suffix.id}" +} + +resource "random_pet" "suffix" { + length = 2 +} + +provider "google" { + region = "us-central1" + zone = "us-central1-c" +} diff --git a/network_connectivity_hub_basic/main.tf b/network_connectivity_hub_basic/main.tf new file mode 100644 index 00000000..ed2db999 --- /dev/null +++ b/network_connectivity_hub_basic/main.tf @@ -0,0 +1,7 @@ +resource "google_network_connectivity_hub" "primary" { + name = "basic-${local.name_suffix}" + description = "A sample hub" + labels = { + label-one = "value-one" + } +} diff --git a/network_connectivity_hub_basic/motd b/network_connectivity_hub_basic/motd new file mode 100644 index 00000000..45a906e8 --- /dev/null +++ b/network_connectivity_hub_basic/motd @@ -0,0 +1,7 @@ +=== + +These examples use real resources that will be billed to the +Google Cloud Platform project you use - so make sure that you +run "terraform destroy" before quitting! + +=== diff --git a/network_connectivity_hub_basic/tutorial.md b/network_connectivity_hub_basic/tutorial.md new file mode 100644 index 00000000..db5a6099 --- /dev/null +++ b/network_connectivity_hub_basic/tutorial.md @@ -0,0 +1,79 @@ +# Network Connectivity Hub Basic - Terraform + +## Setup + + + +Welcome to Terraform in Google Cloud Shell! We need you to let us know what project you'd like to use with Terraform. + + + +Terraform provisions real GCP resources, so anything you create in this session will be billed against this project. + +## Terraforming! + +Let's use {{project-id}} with Terraform! Click the Cloud Shell icon below to copy the command +to your shell, and then run it from the shell by pressing Enter/Return. Terraform will pick up +the project name from the environment variable. + +```bash +export GOOGLE_CLOUD_PROJECT={{project-id}} +``` + +After that, let's get Terraform started. Run the following to pull in the providers. + +```bash +terraform init +``` + +With the providers downloaded and a project set, you're ready to use Terraform. Go ahead! + +```bash +terraform apply +``` + +Terraform will show you what it plans to do, and prompt you to accept. Type "yes" to accept the plan. + +```bash +yes +``` + + +## Post-Apply + +### Editing your config + +Now you've provisioned your resources in GCP! If you run a "plan", you should see no changes needed. + +```bash +terraform plan +``` + +So let's make a change! 
Try editing a number, or appending a value to the name in the editor. Then, +run a 'plan' again. + +```bash +terraform plan +``` + +Afterwards you can run an apply, which implicitly does a plan and shows you the intended changes +at the 'yes' prompt. + +```bash +terraform apply +``` + +```bash +yes +``` + +## Cleanup + +Run the following to remove the resources Terraform provisioned: + +```bash +terraform destroy +``` +```bash +yes +```