diff --git a/google-beta/config.go b/google-beta/config.go index 9ece2a4f50..38a8c2f171 100644 --- a/google-beta/config.go +++ b/google-beta/config.go @@ -72,6 +72,7 @@ type Config struct { AccessContextManagerBasePath string AppEngineBasePath string + BigqueryDataTransferBasePath string BinaryAuthorizationBasePath string CloudBuildBasePath string CloudRunBasePath string diff --git a/google-beta/error_retry_predicates.go b/google-beta/error_retry_predicates.go new file mode 100644 index 0000000000..7518540a4b --- /dev/null +++ b/google-beta/error_retry_predicates.go @@ -0,0 +1,19 @@ +package google + +import ( + "strings" + + "google.golang.org/api/googleapi" +) + +// If a permission necessary to provision a resource is created in the same config +// as the resource itself, the permission may not have propagated by the time terraform +// attempts to create the resource. This allows those errors to be retried until the timeout expires +func iamMemberMissing(err error) (bool, string) { + if gerr, ok := err.(*googleapi.Error); ok { + if gerr.Code == 400 && strings.Contains(gerr.Body, "permission") { + return true, "Waiting for IAM member permissions to propagate." + } + } + return false, "" +} diff --git a/google-beta/provider.go b/google-beta/provider.go index f3e7955780..36c678c811 100644 --- a/google-beta/provider.go +++ b/google-beta/provider.go @@ -100,6 +100,7 @@ func Provider() terraform.ResourceProvider { // Generated Products AccessContextManagerCustomEndpointEntryKey: AccessContextManagerCustomEndpointEntry, AppEngineCustomEndpointEntryKey: AppEngineCustomEndpointEntry, + BigqueryDataTransferCustomEndpointEntryKey: BigqueryDataTransferCustomEndpointEntry, BinaryAuthorizationCustomEndpointEntryKey: BinaryAuthorizationCustomEndpointEntry, CloudBuildCustomEndpointEntryKey: CloudBuildCustomEndpointEntry, CloudRunCustomEndpointEntryKey: CloudRunCustomEndpointEntry, @@ -221,6 +222,7 @@ func ResourceMapWithErrors() (map[string]*schema.Resource, error) { return mergeResourceMaps( GeneratedAccessContextManagerResourcesMap, GeneratedAppEngineResourcesMap, + GeneratedBigqueryDataTransferResourcesMap, GeneratedBinaryAuthorizationResourcesMap, GeneratedCloudBuildResourcesMap, GeneratedCloudRunResourcesMap, @@ -412,6 +414,7 @@ func providerConfigure(d *schema.ResourceData) (interface{}, error) { // Generated products config.AccessContextManagerBasePath = d.Get(AccessContextManagerCustomEndpointEntryKey).(string) config.AppEngineBasePath = d.Get(AppEngineCustomEndpointEntryKey).(string) + config.BigqueryDataTransferBasePath = d.Get(BigqueryDataTransferCustomEndpointEntryKey).(string) config.BinaryAuthorizationBasePath = d.Get(BinaryAuthorizationCustomEndpointEntryKey).(string) config.CloudBuildBasePath = d.Get(CloudBuildCustomEndpointEntryKey).(string) config.CloudRunBasePath = d.Get(CloudRunCustomEndpointEntryKey).(string) @@ -475,6 +478,7 @@ func ConfigureBasePaths(c *Config) { // Generated Products c.AccessContextManagerBasePath = AccessContextManagerDefaultBasePath c.AppEngineBasePath = AppEngineDefaultBasePath + c.BigqueryDataTransferBasePath = BigqueryDataTransferDefaultBasePath c.BinaryAuthorizationBasePath = BinaryAuthorizationDefaultBasePath c.CloudBuildBasePath = CloudBuildDefaultBasePath c.CloudRunBasePath = CloudRunDefaultBasePath diff --git a/google-beta/provider_bigquery_data_transfer_gen.go b/google-beta/provider_bigquery_data_transfer_gen.go new file mode 100644 index 0000000000..9bf4ef2b67 --- /dev/null +++ b/google-beta/provider_bigquery_data_transfer_gen.go @@ -0,0 +1,34 @@ +// 
---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. +// +// ---------------------------------------------------------------------------- + +package google + +import "github.com/hashicorp/terraform/helper/schema" + +// If the base path has changed as a result of your PR, make sure to update +// the provider_reference page! +var BigqueryDataTransferDefaultBasePath = "https://bigquerydatatransfer.googleapis.com/v1/" +var BigqueryDataTransferCustomEndpointEntryKey = "bigquery_data_transfer_custom_endpoint" +var BigqueryDataTransferCustomEndpointEntry = &schema.Schema{ + Type: schema.TypeString, + Optional: true, + ValidateFunc: validateCustomEndpoint, + DefaultFunc: schema.MultiEnvDefaultFunc([]string{ + "GOOGLE_BIGQUERY_DATA_TRANSFER_CUSTOM_ENDPOINT", + }, BigqueryDataTransferDefaultBasePath), +} + +var GeneratedBigqueryDataTransferResourcesMap = map[string]*schema.Resource{ + "google_bigquery_data_transfer_config": resourceBigqueryDataTransferConfig(), +} diff --git a/google-beta/resource_bigquery_data_transfer_config.go b/google-beta/resource_bigquery_data_transfer_config.go new file mode 100644 index 0000000000..544aff3d74 --- /dev/null +++ b/google-beta/resource_bigquery_data_transfer_config.go @@ -0,0 +1,403 @@ +// ---------------------------------------------------------------------------- +// +// *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +// +// ---------------------------------------------------------------------------- +// +// This file is automatically generated by Magic Modules and manual +// changes will be clobbered when the file is regenerated. +// +// Please read more about how to change this file in +// .github/CONTRIBUTING.md. 
+// +// ---------------------------------------------------------------------------- + +package google + +import ( + "fmt" + "log" + "reflect" + "strconv" + "strings" + "time" + + "github.com/hashicorp/terraform/helper/schema" +) + +func resourceBigqueryDataTransferConfig() *schema.Resource { + return &schema.Resource{ + Create: resourceBigqueryDataTransferConfigCreate, + Read: resourceBigqueryDataTransferConfigRead, + Update: resourceBigqueryDataTransferConfigUpdate, + Delete: resourceBigqueryDataTransferConfigDelete, + + Importer: &schema.ResourceImporter{ + State: resourceBigqueryDataTransferConfigImport, + }, + + Timeouts: &schema.ResourceTimeout{ + Create: schema.DefaultTimeout(4 * time.Minute), + Update: schema.DefaultTimeout(4 * time.Minute), + Delete: schema.DefaultTimeout(4 * time.Minute), + }, + + Schema: map[string]*schema.Schema{ + "data_source_id": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "destination_dataset_id": { + Type: schema.TypeString, + Required: true, + }, + "display_name": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "params": { + Type: schema.TypeMap, + Required: true, + Elem: &schema.Schema{Type: schema.TypeString}, + }, + "data_refresh_window_days": { + Type: schema.TypeInt, + Optional: true, + }, + "disabled": { + Type: schema.TypeBool, + Optional: true, + }, + "location": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Default: "US", + }, + "schedule": { + Type: schema.TypeString, + Optional: true, + }, + "name": { + Type: schema.TypeString, + Computed: true, + }, + "project": { + Type: schema.TypeString, + Optional: true, + Computed: true, + ForceNew: true, + }, + }, + } +} + +func resourceBigqueryDataTransferConfigCreate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + obj := make(map[string]interface{}) + displayNameProp, err := expandBigqueryDataTransferConfigDisplayName(d.Get("display_name"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("display_name"); !isEmptyValue(reflect.ValueOf(displayNameProp)) && (ok || !reflect.DeepEqual(v, displayNameProp)) { + obj["displayName"] = displayNameProp + } + destinationDatasetIdProp, err := expandBigqueryDataTransferConfigDestinationDatasetId(d.Get("destination_dataset_id"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("destination_dataset_id"); !isEmptyValue(reflect.ValueOf(destinationDatasetIdProp)) && (ok || !reflect.DeepEqual(v, destinationDatasetIdProp)) { + obj["destinationDatasetId"] = destinationDatasetIdProp + } + dataSourceIdProp, err := expandBigqueryDataTransferConfigDataSourceId(d.Get("data_source_id"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("data_source_id"); !isEmptyValue(reflect.ValueOf(dataSourceIdProp)) && (ok || !reflect.DeepEqual(v, dataSourceIdProp)) { + obj["dataSourceId"] = dataSourceIdProp + } + scheduleProp, err := expandBigqueryDataTransferConfigSchedule(d.Get("schedule"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(scheduleProp)) && (ok || !reflect.DeepEqual(v, scheduleProp)) { + obj["schedule"] = scheduleProp + } + dataRefreshWindowDaysProp, err := expandBigqueryDataTransferConfigDataRefreshWindowDays(d.Get("data_refresh_window_days"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("data_refresh_window_days"); !isEmptyValue(reflect.ValueOf(dataRefreshWindowDaysProp)) && (ok || 
!reflect.DeepEqual(v, dataRefreshWindowDaysProp)) { + obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp + } + disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("disabled"); !isEmptyValue(reflect.ValueOf(disabledProp)) && (ok || !reflect.DeepEqual(v, disabledProp)) { + obj["disabled"] = disabledProp + } + paramsProp, err := expandBigqueryDataTransferConfigParams(d.Get("params"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("params"); !isEmptyValue(reflect.ValueOf(paramsProp)) && (ok || !reflect.DeepEqual(v, paramsProp)) { + obj["params"] = paramsProp + } + + url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}projects/{{project}}/locations/{{location}}/transferConfigs") + if err != nil { + return err + } + + log.Printf("[DEBUG] Creating new Config: %#v", obj) + res, err := sendRequestWithTimeout(config, "POST", url, obj, d.Timeout(schema.TimeoutCreate), iamMemberMissing) + if err != nil { + return fmt.Errorf("Error creating Config: %s", err) + } + + // Store the ID now + id, err := replaceVars(d, config, "{{name}}") + if err != nil { + return fmt.Errorf("Error constructing id: %s", err) + } + d.SetId(id) + + log.Printf("[DEBUG] Finished creating Config %q: %#v", d.Id(), res) + + // `name` is autogenerated from the api so needs to be set post-create + name, ok := res["name"] + if !ok { + return fmt.Errorf("Create response didn't contain critical fields. Create may not have succeeded.") + } + d.Set("name", name.(string)) + d.SetId(name.(string)) + + return resourceBigqueryDataTransferConfigRead(d, meta) +} + +func resourceBigqueryDataTransferConfigRead(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}") + if err != nil { + return err + } + + res, err := sendRequest(config, "GET", url, nil) + if err != nil { + return handleNotFoundError(err, d, fmt.Sprintf("BigqueryDataTransferConfig %q", d.Id())) + } + + project, err := getProject(d, config) + if err != nil { + return err + } + if err := d.Set("project", project); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + + if err := d.Set("display_name", flattenBigqueryDataTransferConfigDisplayName(res["displayName"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("name", flattenBigqueryDataTransferConfigName(res["name"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("destination_dataset_id", flattenBigqueryDataTransferConfigDestinationDatasetId(res["destinationDatasetId"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("data_source_id", flattenBigqueryDataTransferConfigDataSourceId(res["dataSourceId"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("schedule", flattenBigqueryDataTransferConfigSchedule(res["schedule"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("data_refresh_window_days", flattenBigqueryDataTransferConfigDataRefreshWindowDays(res["dataRefreshWindowDays"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := d.Set("disabled", flattenBigqueryDataTransferConfigDisabled(res["disabled"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + if err := 
d.Set("params", flattenBigqueryDataTransferConfigParams(res["params"], d)); err != nil { + return fmt.Errorf("Error reading Config: %s", err) + } + + return nil +} + +func resourceBigqueryDataTransferConfigUpdate(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + obj := make(map[string]interface{}) + destinationDatasetIdProp, err := expandBigqueryDataTransferConfigDestinationDatasetId(d.Get("destination_dataset_id"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("destination_dataset_id"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, destinationDatasetIdProp)) { + obj["destinationDatasetId"] = destinationDatasetIdProp + } + scheduleProp, err := expandBigqueryDataTransferConfigSchedule(d.Get("schedule"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, scheduleProp)) { + obj["schedule"] = scheduleProp + } + dataRefreshWindowDaysProp, err := expandBigqueryDataTransferConfigDataRefreshWindowDays(d.Get("data_refresh_window_days"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("data_refresh_window_days"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, dataRefreshWindowDaysProp)) { + obj["dataRefreshWindowDays"] = dataRefreshWindowDaysProp + } + disabledProp, err := expandBigqueryDataTransferConfigDisabled(d.Get("disabled"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("disabled"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, disabledProp)) { + obj["disabled"] = disabledProp + } + paramsProp, err := expandBigqueryDataTransferConfigParams(d.Get("params"), d, config) + if err != nil { + return err + } else if v, ok := d.GetOkExists("params"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, paramsProp)) { + obj["params"] = paramsProp + } + + url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}") + if err != nil { + return err + } + + log.Printf("[DEBUG] Updating Config %q: %#v", d.Id(), obj) + updateMask := []string{} + + if d.HasChange("destination_dataset_id") { + updateMask = append(updateMask, "destinationDatasetId") + } + + if d.HasChange("schedule") { + updateMask = append(updateMask, "schedule") + } + + if d.HasChange("data_refresh_window_days") { + updateMask = append(updateMask, "dataRefreshWindowDays") + } + + if d.HasChange("disabled") { + updateMask = append(updateMask, "disabled") + } + + if d.HasChange("params") { + updateMask = append(updateMask, "params") + } + // updateMask is a URL parameter but not present in the schema, so replaceVars + // won't set it + url, err = addQueryParams(url, map[string]string{"updateMask": strings.Join(updateMask, ",")}) + if err != nil { + return err + } + _, err = sendRequestWithTimeout(config, "PATCH", url, obj, d.Timeout(schema.TimeoutUpdate)) + + if err != nil { + return fmt.Errorf("Error updating Config %q: %s", d.Id(), err) + } + + return resourceBigqueryDataTransferConfigRead(d, meta) +} + +func resourceBigqueryDataTransferConfigDelete(d *schema.ResourceData, meta interface{}) error { + config := meta.(*Config) + + url, err := replaceVars(d, config, "{{BigqueryDataTransferBasePath}}{{name}}") + if err != nil { + return err + } + + var obj map[string]interface{} + log.Printf("[DEBUG] Deleting Config %q", d.Id()) + res, err := sendRequestWithTimeout(config, "DELETE", url, obj, d.Timeout(schema.TimeoutDelete)) + if err 
!= nil { + return handleNotFoundError(err, d, "Config") + } + + log.Printf("[DEBUG] Finished deleting Config %q: %#v", d.Id(), res) + return nil +} + +func resourceBigqueryDataTransferConfigImport(d *schema.ResourceData, meta interface{}) ([]*schema.ResourceData, error) { + + config := meta.(*Config) + + // current import_formats can't import fields with forward slashes in their value + if err := parseImportId([]string{"(?P<name>.+)"}, d, config); err != nil { + return nil, err + } + + return []*schema.ResourceData{d}, nil +} + +func flattenBigqueryDataTransferConfigDisplayName(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigName(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigDestinationDatasetId(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigDataSourceId(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigSchedule(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d *schema.ResourceData) interface{} { + // Handles the string fixed64 format + if strVal, ok := v.(string); ok { + if intVal, err := strconv.ParseInt(strVal, 10, 64); err == nil { + return intVal + } // let terraform core handle it if we can't convert the string to an int. + } + return v +} + +func flattenBigqueryDataTransferConfigDisabled(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func flattenBigqueryDataTransferConfigParams(v interface{}, d *schema.ResourceData) interface{} { + return v +} + +func expandBigqueryDataTransferConfigDisplayName(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigDestinationDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigDataSourceId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigSchedule(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigDataRefreshWindowDays(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigDisabled(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) { + return v, nil +} + +func expandBigqueryDataTransferConfigParams(v interface{}, d TerraformResourceData, config *Config) (map[string]string, error) { + if v == nil { + return map[string]string{}, nil + } + m := make(map[string]string) + for k, val := range v.(map[string]interface{}) { + m[k] = val.(string) + } + return m, nil +} diff --git a/google-beta/resource_bigquery_data_transfer_config_test.go b/google-beta/resource_bigquery_data_transfer_config_test.go new file mode 100644 index 0000000000..9ef709b187 --- /dev/null +++ b/google-beta/resource_bigquery_data_transfer_config_test.go @@ -0,0 +1,137 @@ +package google + +import ( + "fmt" + "strings" + "testing" + + "github.com/hashicorp/terraform/helper/acctest" + "github.com/hashicorp/terraform/helper/resource" + "github.com/hashicorp/terraform/terraform" +) + +// The service account TF uses needs the permission granted in the configs +// but it will get deleted by 
parallel tests, so they need to be run serially. +func TestAccBigqueryDataTransferConfig(t *testing.T) { + testCases := map[string]func(t *testing.T){ + "basic": testAccBigqueryDataTransferConfig_scheduledQuery_basic, + "update": testAccBigqueryDataTransferConfig_scheduledQuery_update, + } + + for name, tc := range testCases { + // shadow the tc variable into scope so that when + // the loop continues, if t.Run hasn't executed tc(t) + // yet, we don't have a race condition + // see https://github.com/golang/go/wiki/CommonMistakes#using-goroutines-on-loop-iterator-variables + tc := tc + t.Run(name, func(t *testing.T) { + tc(t) + }) + } +} + +func testAccBigqueryDataTransferConfig_scheduledQuery_basic(t *testing.T) { + random_suffix := acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "third", "y"), + }, + { + ResourceName: "google_bigquery_data_transfer_config.query_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + +func testAccBigqueryDataTransferConfig_scheduledQuery_update(t *testing.T) { + random_suffix := acctest.RandString(10) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroy, + Steps: []resource.TestStep{ + { + Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "first", "y"), + }, + { + Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "second", "z"), + }, + { + ResourceName: "google_bigquery_data_transfer_config.query_config", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"location"}, + }, + }, + }) +} + +func testAccCheckBigqueryDataTransferConfigDestroy(s *terraform.State) error { + for name, rs := range s.RootModule().Resources { + if rs.Type != "google_bigquery_data_transfer_config" { + continue + } + if strings.HasPrefix(name, "data.") { + continue + } + + config := testAccProvider.Meta().(*Config) + + url, err := replaceVarsForTest(config, rs, "{{BigqueryDataTransferBasePath}}{{name}}") + if err != nil { + return err + } + + _, err = sendRequest(config, "GET", url, nil) + if err == nil { + return fmt.Errorf("BigqueryDataTransferConfig still exists at %s", url) + } + } + + return nil +} + +func testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, schedule, letter string) string { + return fmt.Sprintf(` +data "google_project" "project" {} + +resource "google_project_iam_member" "permissions" { + role = "roles/iam.serviceAccountShortTermTokenMinter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com" +} + + +resource "google_bigquery_dataset" "my_dataset" { + depends_on = [google_project_iam_member.permissions] + + dataset_id = "my_dataset%s" + friendly_name = "foo" + description = "bar" + location = "asia-northeast1" +} + +resource "google_bigquery_data_transfer_config" "query_config" { + depends_on = [google_project_iam_member.permissions] + + display_name = "my-query-%s" + location = "asia-northeast1" + data_source_id = "scheduled_query" + schedule = "%s sunday of quarter 00:00" + destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id + params = { 
+ destination_table_name_template = "my-table" + write_disposition = "WRITE_APPEND" + query = "SELECT name FROM tabl WHERE x = '%s'" + } +} +`, random_suffix, random_suffix, schedule, letter) +} diff --git a/google-beta/transport.go b/google-beta/transport.go index dd5566d9f4..2aecd7750c 100644 --- a/google-beta/transport.go +++ b/google-beta/transport.go @@ -38,7 +38,7 @@ func sendRequest(config *Config, method, rawurl string, body map[string]interfac return sendRequestWithTimeout(config, method, rawurl, body, DefaultRequestTimeout) } -func sendRequestWithTimeout(config *Config, method, rawurl string, body map[string]interface{}, timeout time.Duration) (map[string]interface{}, error) { +func sendRequestWithTimeout(config *Config, method, rawurl string, body map[string]interface{}, timeout time.Duration, errorRetryPredicates ...func(e error) (bool, string)) (map[string]interface{}, error) { reqHeaders := make(http.Header) reqHeaders.Set("User-Agent", config.userAgent) reqHeaders.Set("Content-Type", "application/json") @@ -81,6 +81,7 @@ func sendRequestWithTimeout(config *Config, method, rawurl string, body map[stri return nil }, timeout, + errorRetryPredicates..., ) if err != nil { return nil, err diff --git a/google-beta/utils.go b/google-beta/utils.go index f6365bbe52..9adfd34efb 100644 --- a/google-beta/utils.go +++ b/google-beta/utils.go @@ -362,14 +362,14 @@ func retryTime(retryFunc func() error, minutes int) error { return retryTimeDuration(retryFunc, time.Duration(minutes)*time.Minute) } -func retryTimeDuration(retryFunc func() error, duration time.Duration) error { +func retryTimeDuration(retryFunc func() error, duration time.Duration, errorRetryPredicates ...func(e error) (bool, string)) error { return resource.Retry(duration, func() *resource.RetryError { err := retryFunc() if err == nil { return nil } for _, e := range errwrap.GetAllType(err, &googleapi.Error{}) { - if isRetryableError(e) { + if isRetryableError(e, errorRetryPredicates) { return resource.RetryableError(e) } } @@ -377,29 +377,41 @@ func retryTimeDuration(retryFunc func() error, duration time.Duration) error { }) } -func isRetryableError(err error) bool { - if gerr, ok := err.(*googleapi.Error); ok && (gerr.Code == 429 || gerr.Code == 500 || gerr.Code == 502 || gerr.Code == 503) { - log.Printf("[DEBUG] Dismissed an error as retryable based on error code: %s", err) - return true - } +func isRetryableError(err error, retryPredicates []func(e error) (bool, string)) bool { + // These operations are always hitting googleapis.com - they should rarely // time out, and if they do, that timeout is retryable. if urlerr, ok := err.(*url.Error); ok && urlerr.Timeout() { log.Printf("[DEBUG] Dismissed an error as retryable based on googleapis.com target: %s", err) return true } - if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == 409 && strings.Contains(gerr.Body, "operationInProgress") { - // 409's are retried because cloud sql throws a 409 when concurrent calls are made. - // The only way right now to determine it is a SQL 409 due to concurrent calls is to - // look at the contents of the error message. 
- // See https://github.com/terraform-providers/terraform-provider-google/issues/3279 - log.Printf("[DEBUG] Dismissed an error as retryable based on error code 409 and error reason 'operationInProgress': %s", err) - return true - } - if gerr, ok := err.(*googleapi.Error); ok && (gerr.Code == 412) && isFingerprintError(err) { - log.Printf("[DEBUG] Dismissed an error as retryable as a fingerprint mismatch: %s", err) - return true + if gerr, ok := err.(*googleapi.Error); ok { + if gerr.Code == 429 || gerr.Code == 500 || gerr.Code == 502 || gerr.Code == 503 { + log.Printf("[DEBUG] Dismissed an error as retryable based on error code: %s", err) + return true + } + + if gerr.Code == 409 && strings.Contains(gerr.Body, "operationInProgress") { + // 409's are retried because cloud sql throws a 409 when concurrent calls are made. + // The only way right now to determine it is a SQL 409 due to concurrent calls is to + // look at the contents of the error message. + // See https://github.com/terraform-providers/terraform-provider-google/issues/3279 + log.Printf("[DEBUG] Dismissed an error as retryable based on error code 409 and error reason 'operationInProgress': %s", err) + return true + } + + if gerr.Code == 412 && isFingerprintError(err) { + log.Printf("[DEBUG] Dismissed an error as retryable as a fingerprint mismatch: %s", err) + return true + } + + } + for _, pred := range retryPredicates { + if retry, reason := pred(err); retry { + log.Printf("[DEBUG] Dismissed an error as retryable. %s - %s", reason, err) + return true + } } return false diff --git a/website/docs/r/bigquery_data_transfer_config.html.markdown b/website/docs/r/bigquery_data_transfer_config.html.markdown new file mode 100644 index 0000000000..edae43a6e0 --- /dev/null +++ b/website/docs/r/bigquery_data_transfer_config.html.markdown @@ -0,0 +1,159 @@ +--- +# ---------------------------------------------------------------------------- +# +# *** AUTO GENERATED CODE *** AUTO GENERATED CODE *** +# +# ---------------------------------------------------------------------------- +# +# This file is automatically generated by Magic Modules and manual +# changes will be clobbered when the file is regenerated. +# +# Please read more about how to change this file in +# .github/CONTRIBUTING.md. +# +# ---------------------------------------------------------------------------- +layout: "google" +page_title: "Google: google_bigquery_data_transfer_config" +sidebar_current: "docs-google-bigquery-data-transfer-config" +description: |- + Represents a data transfer configuration. +--- + +# google\_bigquery\_data\_transfer\_config + +Represents a data transfer configuration. A transfer configuration +contains all metadata needed to perform a data transfer. 
+ + +To get more information about Config, see: + +* [API documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/v1/projects.locations.transferConfigs/create) +* How-to Guides + * [Official Documentation](https://cloud.google.com/bigquery/docs/reference/datatransfer/rest/) + +## Example Usage - Scheduled Query + + +```hcl +data "google_project" "project" {} + +resource "google_project_iam_member" "permissions" { + role = "roles/iam.serviceAccountShortTermTokenMinter" + member = "serviceAccount:service-${data.google_project.project.number}@gcp-sa-bigquerydatatransfer.iam.gserviceaccount.com" +} + +resource "google_bigquery_data_transfer_config" "query_config" { + + depends_on = [google_project_iam_member.permissions] + + display_name = "my-query" + location = "asia-northeast1" + data_source_id = "scheduled_query" + schedule = "first sunday of quarter 00:00" + destination_dataset_id = "${google_bigquery_dataset.my_dataset.dataset_id}" + params = { + destination_table_name_template = "my-table" + write_disposition = "WRITE_APPEND" + query = "SELECT name FROM tabl WHERE x = 'y'" + } +} + +resource "google_bigquery_dataset" "my_dataset" { + + depends_on = [google_project_iam_member.permissions] + + dataset_id = "my_dataset" + friendly_name = "foo" + description = "bar" + location = "asia-northeast1" +} +``` + +## Argument Reference + +The following arguments are supported: + + +* `display_name` - + (Required) + The user-specified display name for the transfer config. + +* `destination_dataset_id` - + (Required) + The BigQuery target dataset id. + +* `data_source_id` - + (Required) + The data source id. Cannot be changed once the transfer config is created. + +* `params` - + (Required) + These parameters are specific to each data source. + + +- - - + + +* `schedule` - + (Optional) + Data transfer schedule. If the data source does not support a custom + schedule, this should be empty. If it is empty, the default value for + the data source will be used. The specified times are in UTC. Examples + of valid format: 1st,3rd monday of month 15:30, every wed,fri of jan, + jun 13:15, and first sunday of quarter 00:00. See more explanation + about the format here: + https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format + NOTE: the granularity should be at least 8 hours, or less frequent. + +* `data_refresh_window_days` - + (Optional) + The number of days to look back to automatically refresh the data. + For example, if dataRefreshWindowDays = 10, then every day BigQuery + reingests data for [today-10, today-1], rather than ingesting data for + just [today-1]. Only valid if the data source supports the feature. + Set the value to 0 to use the default value. + +* `disabled` - + (Optional) + When set to true, no runs are scheduled for a given transfer. + +* `location` - + (Optional) + The geographic location where the transfer config should reside. + Examples: US, EU, asia-northeast1. The default value is US. + +* `project` - (Optional) The ID of the project in which the resource belongs. + If it is not provided, the provider project is used. + + +## Attributes Reference + +In addition to the arguments listed above, the following computed attributes are exported: + + +* `name` - + The resource name of the transfer config. Transfer config names have the + form projects/{projectId}/locations/{location}/transferConfigs/{configId}, + where configId is usually a uuid, but this is not required. 
+ The name is ignored when creating a transfer config. + + +## Timeouts + +This resource provides the following +[Timeouts](/docs/configuration/resources.html#timeouts) configuration options: + +- `create` - Default is 4 minutes. +- `update` - Default is 4 minutes. +- `delete` - Default is 4 minutes. + +## Import + +Config can be imported using any of these accepted formats: + +``` +$ terraform import google_bigquery_data_transfer_config.default {{name}} +``` + +-> If you're importing a resource with beta features, make sure to include `-provider=google-beta` +as an argument so that Terraform uses the correct provider to import your resource.
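Reviewer note on the retry plumbing in this PR: the following is a minimal, self-contained sketch (not provider code) of how the new variadic error-retry predicates compose with the built-in status-code checks. Only `iamMemberMissing` is copied from `error_retry_predicates.go`; the `isRetryable` helper and `main` are simplified stand-ins for `isRetryableError` and its callers.

```go
package main

import (
	"fmt"
	"strings"

	"google.golang.org/api/googleapi"
)

// iamMemberMissing mirrors the predicate added in error_retry_predicates.go:
// a 400 whose body mentions "permission" is treated as IAM propagation lag
// rather than a hard failure.
func iamMemberMissing(err error) (bool, string) {
	if gerr, ok := err.(*googleapi.Error); ok {
		if gerr.Code == 400 && strings.Contains(gerr.Body, "permission") {
			return true, "Waiting for IAM member permissions to propagate."
		}
	}
	return false, ""
}

// isRetryable is a simplified stand-in for the provider's isRetryableError:
// the built-in status-code checks run first, then any caller-supplied
// predicates, matching how the variadic parameter threads from
// sendRequestWithTimeout through retryTimeDuration.
func isRetryable(err error, retryPredicates ...func(error) (bool, string)) bool {
	if gerr, ok := err.(*googleapi.Error); ok &&
		(gerr.Code == 429 || gerr.Code == 500 || gerr.Code == 502 || gerr.Code == 503) {
		return true
	}
	for _, pred := range retryPredicates {
		if retry, reason := pred(err); retry {
			fmt.Printf("[DEBUG] Dismissed an error as retryable. %s\n", reason)
			return true
		}
	}
	return false
}

func main() {
	err := &googleapi.Error{Code: 400, Body: `the caller does not have permission`}

	// Without the predicate a 400 is terminal; with it, the same error retries.
	fmt.Println(isRetryable(err))                   // false
	fmt.Println(isRetryable(err, iamMemberMissing)) // true
}
```

Keeping predicates as plain `func(error) (bool, string)` values lets call sites such as the generated Create function opt into resource-specific retries (here, IAM propagation on `transferConfigs` create) without widening the shared transport path's default retry policy.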
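Similarly, a standalone sketch of the update-mask pattern that `resourceBigqueryDataTransferConfigUpdate` follows: only fields reported as changed are named in `updateMask`, which travels as a URL query parameter because it is not part of the request body. `addUpdateMask` and the `changed` map here are hypothetical stand-ins for the provider's `addQueryParams` and `d.HasChange`.

```go
package main

import (
	"fmt"
	"net/url"
	"strings"
)

// addUpdateMask attaches an updateMask query parameter to a PATCH URL so the
// API only touches the named fields, leaving all other server-side state as-is.
func addUpdateMask(rawurl string, changed map[string]bool) (string, error) {
	updateMask := []string{}
	// Fixed iteration order keeps this example deterministic; the provider
	// appends in the order of its HasChange checks.
	for _, field := range []string{"destinationDatasetId", "schedule", "dataRefreshWindowDays", "disabled", "params"} {
		if changed[field] {
			updateMask = append(updateMask, field)
		}
	}
	u, err := url.Parse(rawurl)
	if err != nil {
		return "", err
	}
	q := u.Query()
	q.Set("updateMask", strings.Join(updateMask, ","))
	u.RawQuery = q.Encode()
	return u.String(), nil
}

func main() {
	reqURL, err := addUpdateMask(
		"https://bigquerydatatransfer.googleapis.com/v1/projects/p/locations/us/transferConfigs/c",
		map[string]bool{"schedule": true, "disabled": true},
	)
	if err != nil {
		panic(err)
	}
	// Prints the PATCH URL with updateMask=schedule%2Cdisabled (comma escaped).
	fmt.Println(reqURL)
}
```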