From cbe2a3ae1407a57360c6754addf4ccbeb34e7188 Mon Sep 17 00:00:00 2001 From: Modular Magician Date: Mon, 21 Jun 2021 18:50:24 +0000 Subject: [PATCH] Add Vertex AI Dataset (#4863) * initial Dataset Co-authored-by: upodroid * pass d to OperationWaiters * fetch region from self_link * fix regex and add test Signed-off-by: Modular Magician --- .changelog/4863.txt | 3 + google/config.go | 4 + google/provider.go | 14 +- google/resource_vertex_ai_dataset.go | 462 ++++++++++++++++++ ...source_vertex_ai_dataset_generated_test.go | 86 ++++ google/self_link_helpers.go | 12 + google/self_link_helpers_test.go | 12 + google/vertex_ai_operation.go | 64 +++ .../docs/r/vertex_ai_dataset.html.markdown | 122 +++++ website/google.erb | 16 + 10 files changed, 793 insertions(+), 2 deletions(-) create mode 100644 .changelog/4863.txt create mode 100644 google/resource_vertex_ai_dataset.go create mode 100644 google/resource_vertex_ai_dataset_generated_test.go create mode 100644 google/vertex_ai_operation.go create mode 100644 website/docs/r/vertex_ai_dataset.html.markdown diff --git a/.changelog/4863.txt b/.changelog/4863.txt new file mode 100644 index 00000000000..b7593f7ec59 --- /dev/null +++ b/.changelog/4863.txt @@ -0,0 +1,3 @@ +```release-note:new-resource +`google_vertex_ai_dataset` +``` diff --git a/google/config.go b/google/config.go index aaf86910afc..1cf87384ec0 100644 --- a/google/config.go +++ b/google/config.go @@ -139,6 +139,7 @@ type Config struct { StorageBasePath string TagsBasePath string TPUBasePath string + VertexAIBasePath string VPCAccessBasePath string WorkflowsBasePath string @@ -225,6 +226,7 @@ const SQLBasePathKey = "SQL" const StorageBasePathKey = "Storage" const TagsBasePathKey = "Tags" const TPUBasePathKey = "TPU" +const VertexAIBasePathKey = "VertexAI" const VPCAccessBasePathKey = "VPCAccess" const WorkflowsBasePathKey = "Workflows" const CloudBillingBasePathKey = "CloudBilling" @@ -303,6 +305,7 @@ var DefaultBasePaths = map[string]string{ StorageBasePathKey: 
"https://storage.googleapis.com/storage/v1/", TagsBasePathKey: "https://cloudresourcemanager.googleapis.com/v3/", TPUBasePathKey: "https://tpu.googleapis.com/v1/", + VertexAIBasePathKey: "https://{{region}}-aiplatform.googleapis.com/v1/", VPCAccessBasePathKey: "https://vpcaccess.googleapis.com/v1/", WorkflowsBasePathKey: "https://workflows.googleapis.com/v1/", CloudBillingBasePathKey: "https://cloudbilling.googleapis.com/v1/", @@ -1134,6 +1137,7 @@ func ConfigureBasePaths(c *Config) { c.StorageBasePath = DefaultBasePaths[StorageBasePathKey] c.TagsBasePath = DefaultBasePaths[TagsBasePathKey] c.TPUBasePath = DefaultBasePaths[TPUBasePathKey] + c.VertexAIBasePath = DefaultBasePaths[VertexAIBasePathKey] c.VPCAccessBasePath = DefaultBasePaths[VPCAccessBasePathKey] c.WorkflowsBasePath = DefaultBasePaths[WorkflowsBasePathKey] diff --git a/google/provider.go b/google/provider.go index 28c610c3a90..653bbbc9496 100644 --- a/google/provider.go +++ b/google/provider.go @@ -611,6 +611,14 @@ func Provider() *schema.Provider { "GOOGLE_TPU_CUSTOM_ENDPOINT", }, DefaultBasePaths[TPUBasePathKey]), }, + "vertex_ai_custom_endpoint": { + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_VERTEX_AI_CUSTOM_ENDPOINT", + }, DefaultBasePaths[VertexAIBasePathKey]), + }, "vpc_access_custom_endpoint": { Type: schema.TypeString, Optional: true, @@ -758,9 +766,9 @@ func Provider() *schema.Provider { return provider } -// Generated resources: 196 +// Generated resources: 197 // Generated IAM resources: 87 -// Total generated resources: 283 +// Total generated resources: 284 func ResourceMap() map[string]*schema.Resource { resourceMap, _ := ResourceMapWithErrors() return resourceMap @@ -1050,6 +1058,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { "google_tags_tag_value_iam_policy": ResourceIamPolicy(TagsTagValueIamSchema, TagsTagValueIamUpdaterProducer, TagsTagValueIdParseFunc), 
"google_tags_tag_binding": resourceTagsTagBinding(), "google_tpu_node": resourceTPUNode(), + "google_vertex_ai_dataset": resourceVertexAIDataset(), "google_vpc_access_connector": resourceVPCAccessConnector(), "google_workflows_workflow": resourceWorkflowsWorkflow(), }, @@ -1318,6 +1327,7 @@ func providerConfigure(ctx context.Context, d *schema.ResourceData, p *schema.Pr config.StorageBasePath = d.Get("storage_custom_endpoint").(string) config.TagsBasePath = d.Get("tags_custom_endpoint").(string) config.TPUBasePath = d.Get("tpu_custom_endpoint").(string) + config.VertexAIBasePath = d.Get("vertex_ai_custom_endpoint").(string) config.VPCAccessBasePath = d.Get("vpc_access_custom_endpoint").(string) config.WorkflowsBasePath = d.Get("workflows_custom_endpoint").(string) diff --git a/google/resource_vertex_ai_dataset.go b/google/resource_vertex_ai_dataset.go new file mode 100644 index 00000000000..21d80379603 --- /dev/null +++ b/google/resource_vertex_ai_dataset.go @@ -0,0 +1,462 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "log" + "reflect" + "strings" + "time" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func resourceVertexAIDataset() *schema.Resource { + return &schema.Resource{ + Create: resourceVertexAIDatasetCreate, + Read: resourceVertexAIDatasetRead, + Update: resourceVertexAIDatasetUpdate, + Delete: resourceVertexAIDatasetDelete, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(6 * time.Minute), + Update: schema.DefaultTimeout(6 * time.Minute), + Delete: schema.DefaultTimeout(10 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "display_name": { + Type: schema.TypeString, + Required: true, + Description: `The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters.`, + }, + "metadata_schema_uri": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: `Points to a YAML file stored on Google Cloud Storage describing additional information about the Dataset. The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/metadata/.`, + }, + "encryption_spec": { + Type: schema.TypeList, + Optional: true, + ForceNew: true, + Description: `Customer-managed encryption key spec for a Dataset. If set, this Dataset and all sub-resources of this Dataset will be secured by this key.`, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "kms_key_name": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: `Required. The Cloud KMS resource identifier of the customer managed encryption key used to protect a resource. +Has the form: projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key. 
The key needs to be in the same region as where the resource is created.`, + }, + }, + }, + }, + "labels": { + Type: schema.TypeMap, + Computed: true, + Optional: true, + Description: `A set of key/value label pairs to assign to this Workflow.`, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "region": { + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: true, + Description: `The region of the dataset. eg us-central1`, + }, + "create_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp of when the workflow was created in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits.`, + }, + "name": { + Type: schema.TypeString, + Computed: true, + Description: `The resource name of the Dataset. This value is set by Google.`, + }, + "update_time": { + Type: schema.TypeString, + Computed: true, + Description: `The timestamp of when the workflow was last updated in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits.`, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + UseJSONNumber: true, + } +} + +func resourceVertexAIDatasetCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + obj := make(map[string]interface{}) + displayNameProp, err := expandVertexAIDatasetDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + labelsProp, err := expandVertexAIDatasetLabels(d.Get("labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(labelsProp)) && (ok || !reflect.DeepEqual(v, 
labelsProp)) { + obj["labels"] = labelsProp + } + encryptionSpecProp, err := expandVertexAIDatasetEncryptionSpec(d.Get("encryption_spec"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("encryption_spec"); !isEmptyValue(reflect.ValueOf(encryptionSpecProp)) && (ok || !reflect.DeepEqual(v, encryptionSpecProp)) { + obj["encryptionSpec"] = encryptionSpecProp + } + metadataSchemaUriProp, err := expandVertexAIDatasetMetadataSchemaUri(d.Get("metadata_schema_uri"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("metadata_schema_uri"); !isEmptyValue(reflect.ValueOf(metadataSchemaUriProp)) && (ok || !reflect.DeepEqual(v, metadataSchemaUriProp)) { + obj["metadataSchemaUri"] = metadataSchemaUriProp + } + + url, err := replaceVars(d, config, "{{VertexAIBasePath}}projects/{{project}}/locations/{{region}}/datasets") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new Dataset: %#v", obj) + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for Dataset: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "POST", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutCreate)) + if err != nil { + return fmt.Errorf("Error creating Dataset: %s", err) + } + + // Store the ID now + id, err := replaceVars(d, config, "{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + // Use the resource in the operation response to populate + // identity fields and d.Id() before read + var opRes map[string]interface{} + err = vertexAIOperationWaitTimeWithResponse( + config, res, &opRes, project, "Creating Dataset", userAgent, + d.Timeout(schema.TimeoutCreate)) + if err != nil { + // The resource didn't 
actually create + d.SetId("") + return fmt.Errorf("Error waiting to create Dataset: %s", err) + } + + if err := d.Set("name", flattenVertexAIDatasetName(opRes["name"], d, config)); err != nil { + return err + } + + // This may have caused the ID to update - update it if so. + id, err = replaceVars(d, config, "{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating Dataset %q: %#v", d.Id(), res) + + return resourceVertexAIDatasetRead(d, meta) +} + +func resourceVertexAIDatasetRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + url, err := replaceVars(d, config, "{{VertexAIBasePath}}{{name}}") + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for Dataset: %s", err) + } + billingProject = project + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequest(config, "GET", billingProject, url, userAgent, nil) + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("VertexAIDataset %q", d.Id())) + } + + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + + if err := d.Set("name", flattenVertexAIDatasetName(res["name"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := d.Set("display_name", flattenVertexAIDatasetDisplayName(res["displayName"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := d.Set("create_time", flattenVertexAIDatasetCreateTime(res["createTime"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := 
d.Set("update_time", flattenVertexAIDatasetUpdateTime(res["updateTime"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := d.Set("labels", flattenVertexAIDatasetLabels(res["labels"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := d.Set("encryption_spec", flattenVertexAIDatasetEncryptionSpec(res["encryptionSpec"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + if err := d.Set("metadata_schema_uri", flattenVertexAIDatasetMetadataSchemaUri(res["metadataSchemaUri"], d, config)); err != nil { + return fmt.Errorf("Error reading Dataset: %s", err) + } + + return nil +} + +func resourceVertexAIDatasetUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for Dataset: %s", err) + } + billingProject = project + + obj := make(map[string]interface{}) + displayNameProp, err := expandVertexAIDatasetDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + labelsProp, err := expandVertexAIDatasetLabels(d.Get("labels"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("labels"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, labelsProp)) { + obj["labels"] = labelsProp + } + + url, err := replaceVars(d, config, "{{VertexAIBasePath}}{{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating Dataset %q: %#v", d.Id(), obj) + updateMask := []string{} + + if d.HasChange("display_name") { + updateMask = append(updateMask, "displayName") + 
} + + if d.HasChange("labels") { + updateMask = append(updateMask, "labels") + } + // updateMask is a URL parameter but not present in the schema, so replaceVars + // won't set it + url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + return err + } + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "PATCH", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return fmt.Errorf("Error updating Dataset %q: %s", d.Id(), err) + } else { + log.Printf("[DEBUG] Finished updating Dataset %q: %#v", d.Id(), res) + } + + err = vertexAIOperationWaitTime( + config, res, project, "Updating Dataset", userAgent, + d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return err + } + + return resourceVertexAIDatasetRead(d, meta) +} + +func resourceVertexAIDatasetDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + userAgent, err := generateUserAgentString(d, config.userAgent) + if err != nil { + return err + } + + billingProject := "" + + project, err := getProject(d, config) + if err != nil { + return fmt.Errorf("Error fetching project for Dataset: %s", err) + } + billingProject = project + + url, err := replaceVars(d, config, "{{VertexAIBasePath}}{{name}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting Dataset %q", d.Id()) + + // err == nil indicates that the billing_project value was found + if bp, err := getBillingProject(d, config); err == nil { + billingProject = bp + } + + res, err := sendRequestWithTimeout(config, "DELETE", billingProject, url, userAgent, obj, d.Timeout(schema.TimeoutDelete)) + if err != nil { + return handleNotFoundError(err, d, "Dataset") + } + + err = vertexAIOperationWaitTime( + config, res, project, "Deleting 
Dataset", userAgent, + d.Timeout(schema.TimeoutDelete)) + + if err != nil { + return err + } + + log.Printf("[DEBUG] Finished deleting Dataset %q: %#v", d.Id(), res) + return nil +} + +func flattenVertexAIDatasetName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetDisplayName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetCreateTime(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetUpdateTime(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetLabels(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetEncryptionSpec(v interface{}, d *schema.ResourceData, config *Config) interface{} { + if v == nil { + return nil + } + original := v.(map[string]interface{}) + if len(original) == 0 { + return nil + } + transformed := make(map[string]interface{}) + transformed["kms_key_name"] = + flattenVertexAIDatasetEncryptionSpecKmsKeyName(original["kmsKeyName"], d, config) + return []interface{}{transformed} +} +func flattenVertexAIDatasetEncryptionSpecKmsKeyName(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func flattenVertexAIDatasetMetadataSchemaUri(v interface{}, d *schema.ResourceData, config *Config) interface{} { + return v +} + +func expandVertexAIDatasetDisplayName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandVertexAIDatasetLabels(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} + +func expandVertexAIDatasetEncryptionSpec(v interface{}, d 
TerraformResourceData, config *Config) (interface{}, error) { + l := v.([]interface{}) + if len(l) == 0 || l[0] == nil { + return nil, nil + } + raw := l[0] + original := raw.(map[string]interface{}) + transformed := make(map[string]interface{}) + + transformedKmsKeyName, err := expandVertexAIDatasetEncryptionSpecKmsKeyName(original["kms_key_name"], d, config) + if err != nil { + return nil, err + } else if val := reflect.ValueOf(transformedKmsKeyName); val.IsValid() && !isEmptyValue(val) { + transformed["kmsKeyName"] = transformedKmsKeyName + } + + return transformed, nil +} + +func expandVertexAIDatasetEncryptionSpecKmsKeyName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandVertexAIDatasetMetadataSchemaUri(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} diff --git a/google/resource_vertex_ai_dataset_generated_test.go b/google/resource_vertex_ai_dataset_generated_test.go new file mode 100644 index 00000000000..dcbfe443b5c --- /dev/null +++ b/google/resource_vertex_ai_dataset_generated_test.go @@ -0,0 +1,86 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccVertexAIDataset_vertexAiDatasetExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckVertexAIDatasetDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccVertexAIDataset_vertexAiDatasetExample(context), + }, + }, + }) +} + +func testAccVertexAIDataset_vertexAiDatasetExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_vertex_ai_dataset" "dataset" { + display_name = "terraform%{random_suffix}" + metadata_schema_uri = "gs://google-cloud-aiplatform/schema/dataset/metadata/image_1.0.0.yaml" + region = "us-central1" +} +`, context) +} + +func testAccCheckVertexAIDatasetDestroyProducer(t *testing.T) func(s *terraform.State) error { + return func(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_vertex_ai_dataset" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := googleProviderConfig(t) + + url, err := replaceVarsForTest(config, rs, "{{VertexAIBasePath}}{{name}}") + if err != nil { + return err + } + + billingProject := "" + + if config.BillingProject != "" { + billingProject = config.BillingProject + } + + _, err = sendRequest(config, "GET", billingProject, url, config.userAgent, nil) + if err == nil { + return fmt.Errorf("VertexAIDataset still exists at %s", url) + } + } + + return nil + } +} diff --git a/google/self_link_helpers.go b/google/self_link_helpers.go index fdc054abbf0..16767600e56 100644 --- a/google/self_link_helpers.go +++ 
b/google/self_link_helpers.go @@ -169,3 +169,15 @@ func GetRegionFromRegionSelfLink(selfLink string) string { } return selfLink } + +// This function supports selflinks that have regions and locations in their paths +func GetRegionFromRegionalSelfLink(selfLink string) string { + re := regexp.MustCompile("projects/[a-zA-Z0-9-]*/(?:locations|regions)/([a-zA-Z0-9-]*)") + switch { + case re.MatchString(selfLink): + if res := re.FindStringSubmatch(selfLink); len(res) == 2 && res[1] != "" { + return res[1] + } + } + return selfLink +} diff --git a/google/self_link_helpers_test.go b/google/self_link_helpers_test.go index a9b67434473..172b0adcc7d 100644 --- a/google/self_link_helpers_test.go +++ b/google/self_link_helpers_test.go @@ -122,3 +122,15 @@ func TestGetRegionFromRegionSelfLink(t *testing.T) { } } } + +func TestGetRegionFromRegionalSelfLink(t *testing.T) { + cases := map[string]string{ + "projects/foo/locations/europe-north1/datasets/bar/operations/foobar": "europe-north1", + "projects/REDACTED/regions/europe-north1/subnetworks/tf-test-net-xbwhsmlfm8": "europe-north1", + } + for input, expected := range cases { + if result := GetRegionFromRegionalSelfLink(input); result != expected { + t.Errorf("expected to get %q from %q, got %q", expected, input, result) + } + } +} diff --git a/google/vertex_ai_operation.go b/google/vertex_ai_operation.go new file mode 100644 index 00000000000..cc8f8a0ac28 --- /dev/null +++ b/google/vertex_ai_operation.go @@ -0,0 +1,64 @@ +package google + +import ( + "encoding/json" + "fmt" + "time" +) + +type VertexAIOperationWaiter struct { + Config *Config + UserAgent string + Project string + CommonOperationWaiter +} + +func (w *VertexAIOperationWaiter) QueryOp() (interface{}, error) { + if w == nil { + return nil, fmt.Errorf("Cannot query operation, it's unset or nil.") + } + + region := GetRegionFromRegionalSelfLink(w.CommonOperationWaiter.Op.Name) + + // Returns the proper get. 
+ url := fmt.Sprintf("https://%s-aiplatform.googleapis.com/v1/%s", region, w.CommonOperationWaiter.Op.Name) + + return sendRequest(w.Config, "GET", w.Project, url, w.UserAgent, nil) +} + +func createVertexAIWaiter(config *Config, op map[string]interface{}, project, activity, userAgent string) (*VertexAIOperationWaiter, error) { + w := &VertexAIOperationWaiter{ + Config: config, + UserAgent: userAgent, + Project: project, + } + if err := w.CommonOperationWaiter.SetOp(op); err != nil { + return nil, err + } + return w, nil +} + +// nolint: deadcode,unused +func vertexAIOperationWaitTimeWithResponse(config *Config, op map[string]interface{}, response *map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + return err + } + if err := OperationWait(w, activity, timeout, config.PollInterval); err != nil { + return err + } + return json.Unmarshal([]byte(w.CommonOperationWaiter.Op.Response), response) +} + +func vertexAIOperationWaitTime(config *Config, op map[string]interface{}, project, activity, userAgent string, timeout time.Duration) error { + if val, ok := op["name"]; !ok || val == "" { + // This was a synchronous call - there is no operation to wait for. + return nil + } + w, err := createVertexAIWaiter(config, op, project, activity, userAgent) + if err != nil { + // If w is nil, the op was synchronous. 
+ return err + } + return OperationWait(w, activity, timeout, config.PollInterval) +} diff --git a/website/docs/r/vertex_ai_dataset.html.markdown b/website/docs/r/vertex_ai_dataset.html.markdown new file mode 100644 index 00000000000..5dacda565e3 --- /dev/null +++ b/website/docs/r/vertex_ai_dataset.html.markdown @@ -0,0 +1,122 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in +# .github/CONTRIBUTING.md. +# +# ---------------------------------------------------------------------------- +subcategory: "Vertex AI" +layout: "google" +page_title: "Google: google_vertex_ai_dataset" +sidebar_current: "docs-google-vertex-ai-dataset" +description: |- + A collection of DataItems and Annotations on them. +--- + +# google\_vertex\_ai\_dataset + +A collection of DataItems and Annotations on them. + + +To get more information about Dataset, see: + +* [API documentation](https://cloud.google.com/vertex-ai/docs/reference/rest/v1/projects.locations.datasets) +* How-to Guides + * [Official Documentation](https://cloud.google.com/vertex-ai/docs) + + +## Example Usage - Vertex Ai Dataset + + +```hcl +resource "google_vertex_ai_dataset" "dataset" { + display_name = "terraform" + metadata_schema_uri = "gs://google-cloud-aiplatform/schema/dataset/metadata/image_1.0.0.yaml" + region = "us-central1" +} +``` + +## Argument Reference + +The following arguments are supported: + + +* `display_name` - + (Required) + The user-defined name of the Dataset. The name can be up to 128 characters long and can be consist of any UTF-8 characters. 
+ +* `metadata_schema_uri` - + (Required) + Points to a YAML file stored on Google Cloud Storage describing additional information about the Dataset. The schema is defined as an OpenAPI 3.0.2 Schema Object. The schema files that can be used here are found in gs://google-cloud-aiplatform/schema/dataset/metadata/. + + +- - - + + +* `labels` - + (Optional) + A set of key/value label pairs to assign to this Dataset. + +* `encryption_spec` - + (Optional) + Customer-managed encryption key spec for a Dataset. If set, this Dataset and all sub-resources of this Dataset will be secured by this key. + Structure is documented below. + +* `region` - + (Optional) + The region of the dataset. eg us-central1 + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + + +The `encryption_spec` block supports: + +* `kms_key_name` - + (Optional) + Required. The Cloud KMS resource identifier of the customer managed encryption key used to protect a resource. + Has the form: projects/my-project/locations/my-region/keyRings/my-kr/cryptoKeys/my-key. The key needs to be in the same region as where the resource is created. + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are exported: + +* `id` - an identifier for the resource with format `{{name}}` + +* `name` - + The resource name of the Dataset. This value is set by Google. + +* `create_time` - + The timestamp of when the dataset was created in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. + +* `update_time` - + The timestamp of when the dataset was last updated in RFC3339 UTC "Zulu" format, with nanosecond resolution and up to nine fractional digits. + + +## Timeouts + +This resource provides the following +[Timeouts](/docs/configuration/resources.html#timeouts) configuration options: + +- `create` - Default is 6 minutes. +- `update` - Default is 6 minutes. 
+- `delete` - Default is 10 minutes. + +## Import + +This resource does not support import. + +## User Project Overrides + +This resource supports [User Project Overrides](https://www.terraform.io/docs/providers/google/guides/provider_reference.html#user_project_override). diff --git a/website/google.erb b/website/google.erb index baf9a84a5cf..47eb03d9437 100644 --- a/website/google.erb +++ b/website/google.erb @@ -2949,6 +2949,22 @@ +
  • + Vertex AI + +
  • +
  • Workflows