Skip to content

Commit

Permalink
Add scheduleOptions to BigQuery Data Transfer (#3895) (#7633)
Browse files Browse the repository at this point in the history
* Add scheduleOptions to BigQuery Data Transfer

* Fix typo

* Update: set the timezone to UTC

* Update API type to Time

Signed-off-by: Modular Magician <[email protected]>
  • Loading branch information
modular-magician authored Oct 26, 2020
1 parent 3480359 commit 8ddaaa1
Show file tree
Hide file tree
Showing 4 changed files with 184 additions and 5 deletions.
3 changes: 3 additions & 0 deletions .changelog/3895.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
```release-note:enhancement
bigquery: added `schedule_options` field to `google_bigquery_data_transfer_config` resource
```
132 changes: 132 additions & 0 deletions google/resource_bigquery_data_transfer_config.go
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,45 @@ about the format here:
https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
NOTE: the granularity should be at least 8 hours, or less frequent.`,
},
"schedule_options": {
Type: schema.TypeList,
Optional: true,
Description: `Options customizing the data transfer schedule.`,
MaxItems: 1,
Elem: &schema.Resource{
Schema: map[string]*schema.Schema{
"disable_auto_scheduling": {
Type: schema.TypeBool,
Optional: true,
Description: `If true, automatic scheduling of data transfer runs for this
configuration will be disabled. The runs can be started on ad-hoc
basis using transferConfigs.startManualRuns API. When automatic
scheduling is disabled, the TransferConfig.schedule field will
be ignored.`,
AtLeastOneOf: []string{"schedule_options.0.disable_auto_scheduling", "schedule_options.0.start_time", "schedule_options.0.end_time"},
},
"end_time": {
Type: schema.TypeString,
Optional: true,
Description: `Defines time to stop scheduling transfer runs. A transfer run cannot be
scheduled at or after the end time. The end time can be changed at any
moment. The time when a data transfer can be triggered manually is not
limited by this option.`,
AtLeastOneOf: []string{"schedule_options.0.disable_auto_scheduling", "schedule_options.0.start_time", "schedule_options.0.end_time"},
},
"start_time": {
Type: schema.TypeString,
Optional: true,
Description: `Specifies time to start scheduling transfer runs. The first run will be
scheduled at or after the start time according to a recurrence pattern
defined in the schedule string. The start time can be changed at any
moment. The time when a data transfer can be triggered manually is not
limited by this option.`,
AtLeastOneOf: []string{"schedule_options.0.disable_auto_scheduling", "schedule_options.0.start_time", "schedule_options.0.end_time"},
},
},
},
},
"sensitive_params": {
Type: schema.TypeList,
Optional: true,
Expand Down Expand Up @@ -203,6 +242,12 @@ func resourceBigqueryDataTransferConfigCreate(d *schema.ResourceData, meta inter
} else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(scheduleProp)) && (ok || !reflect.DeepEqual(v, scheduleProp)) {
obj["schedule"] = scheduleProp
}
scheduleOptionsProp, err := expandBigqueryDataTransferConfigScheduleOptions(d.Get("schedule_options"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("schedule_options"); !isEmptyValue(reflect.ValueOf(scheduleOptionsProp)) && (ok || !reflect.DeepEqual(v, scheduleOptionsProp)) {
obj["scheduleOptions"] = scheduleOptionsProp
}
notificationPubsubTopicProp, err := expandBigqueryDataTransferConfigNotificationPubsubTopic(d.Get("notification_pubsub_topic"), d, config)
if err != nil {
return err
Expand Down Expand Up @@ -351,6 +396,9 @@ func resourceBigqueryDataTransferConfigRead(d *schema.ResourceData, meta interfa
if err := d.Set("schedule", flattenBigqueryDataTransferConfigSchedule(res["schedule"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
if err := d.Set("schedule_options", flattenBigqueryDataTransferConfigScheduleOptions(res["scheduleOptions"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
if err := d.Set("notification_pubsub_topic", flattenBigqueryDataTransferConfigNotificationPubsubTopic(res["notificationPubsubTopic"], d, config)); err != nil {
return fmt.Errorf("Error reading Config: %s", err)
}
Expand Down Expand Up @@ -395,6 +443,12 @@ func resourceBigqueryDataTransferConfigUpdate(d *schema.ResourceData, meta inter
} else if v, ok := d.GetOkExists("schedule"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, scheduleProp)) {
obj["schedule"] = scheduleProp
}
scheduleOptionsProp, err := expandBigqueryDataTransferConfigScheduleOptions(d.Get("schedule_options"), d, config)
if err != nil {
return err
} else if v, ok := d.GetOkExists("schedule_options"); !isEmptyValue(reflect.ValueOf(v)) && (ok || !reflect.DeepEqual(v, scheduleOptionsProp)) {
obj["scheduleOptions"] = scheduleOptionsProp
}
notificationPubsubTopicProp, err := expandBigqueryDataTransferConfigNotificationPubsubTopic(d.Get("notification_pubsub_topic"), d, config)
if err != nil {
return err
Expand Down Expand Up @@ -441,6 +495,10 @@ func resourceBigqueryDataTransferConfigUpdate(d *schema.ResourceData, meta inter
updateMask = append(updateMask, "schedule")
}

if d.HasChange("schedule_options") {
updateMask = append(updateMask, "scheduleOptions")
}

if d.HasChange("notification_pubsub_topic") {
updateMask = append(updateMask, "notificationPubsubTopic")
}
Expand Down Expand Up @@ -548,6 +606,35 @@ func flattenBigqueryDataTransferConfigSchedule(v interface{}, d *schema.Resource
return v
}

// flattenBigqueryDataTransferConfigScheduleOptions converts the API
// representation of scheduleOptions (a map keyed by camelCase field names)
// into the single-element list form used by the Terraform schema.
// It returns nil when the API value is absent or empty.
func flattenBigqueryDataTransferConfigScheduleOptions(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	if v == nil {
		return nil
	}
	opts := v.(map[string]interface{})
	if len(opts) == 0 {
		return nil
	}
	flattened := map[string]interface{}{
		"disable_auto_scheduling": flattenBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling(opts["disableAutoScheduling"], d, config),
		"start_time":              flattenBigqueryDataTransferConfigScheduleOptionsStartTime(opts["startTime"], d, config),
		"end_time":                flattenBigqueryDataTransferConfigScheduleOptionsEndTime(opts["endTime"], d, config),
	}
	return []interface{}{flattened}
}
// flattenBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling
// returns the API value unchanged; no conversion is needed for this field.
func flattenBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}

// flattenBigqueryDataTransferConfigScheduleOptionsStartTime returns the API
// value unchanged; the timestamp string is stored in state as-is.
func flattenBigqueryDataTransferConfigScheduleOptionsStartTime(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}

// flattenBigqueryDataTransferConfigScheduleOptionsEndTime returns the API
// value unchanged; the timestamp string is stored in state as-is.
func flattenBigqueryDataTransferConfigScheduleOptionsEndTime(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}

// flattenBigqueryDataTransferConfigNotificationPubsubTopic returns the API
// value unchanged; the topic name needs no conversion for state.
func flattenBigqueryDataTransferConfigNotificationPubsubTopic(v interface{}, d *schema.ResourceData, config *Config) interface{} {
	return v
}
Expand Down Expand Up @@ -603,6 +690,51 @@ func expandBigqueryDataTransferConfigSchedule(v interface{}, d TerraformResource
return v, nil
}

// expandBigqueryDataTransferConfigScheduleOptions converts the Terraform
// schema value for schedule_options (a list containing at most one map) into
// the camelCase map shape sent to the API. Fields the provider considers
// empty (per isEmptyValue) are omitted from the request body.
func expandBigqueryDataTransferConfigScheduleOptions(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	list := v.([]interface{})
	if len(list) == 0 || list[0] == nil {
		return nil, nil
	}
	src := list[0].(map[string]interface{})
	req := make(map[string]interface{})

	disableAuto, err := expandBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling(src["disable_auto_scheduling"], d, config)
	if err != nil {
		return nil, err
	}
	if rv := reflect.ValueOf(disableAuto); rv.IsValid() && !isEmptyValue(rv) {
		req["disableAutoScheduling"] = disableAuto
	}

	startTime, err := expandBigqueryDataTransferConfigScheduleOptionsStartTime(src["start_time"], d, config)
	if err != nil {
		return nil, err
	}
	if rv := reflect.ValueOf(startTime); rv.IsValid() && !isEmptyValue(rv) {
		req["startTime"] = startTime
	}

	endTime, err := expandBigqueryDataTransferConfigScheduleOptionsEndTime(src["end_time"], d, config)
	if err != nil {
		return nil, err
	}
	if rv := reflect.ValueOf(endTime); rv.IsValid() && !isEmptyValue(rv) {
		req["endTime"] = endTime
	}

	return req, nil
}

// expandBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling passes
// the configured value through unchanged; no conversion is needed.
func expandBigqueryDataTransferConfigScheduleOptionsDisableAutoScheduling(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandBigqueryDataTransferConfigScheduleOptionsStartTime passes the
// configured timestamp string through to the API request unchanged.
func expandBigqueryDataTransferConfigScheduleOptionsStartTime(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandBigqueryDataTransferConfigScheduleOptionsEndTime passes the
// configured timestamp string through to the API request unchanged.
func expandBigqueryDataTransferConfigScheduleOptionsEndTime(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}

// expandBigqueryDataTransferConfigNotificationPubsubTopic passes the
// configured topic name through to the API request unchanged.
func expandBigqueryDataTransferConfigNotificationPubsubTopic(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
	return v, nil
}
Expand Down
24 changes: 19 additions & 5 deletions google/resource_bigquery_data_transfer_config_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import (
"fmt"
"strings"
"testing"
"time"

"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
Expand Down Expand Up @@ -33,14 +34,17 @@ func TestAccBigqueryDataTransferConfig(t *testing.T) {

func testAccBigqueryDataTransferConfig_scheduledQuery_basic(t *testing.T) {
random_suffix := randString(t, 10)
now := time.Now().UTC()
start_time := now.Add(1 * time.Hour).Format(time.RFC3339)
end_time := now.AddDate(0, 1, 0).Format(time.RFC3339)

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "third", "y"),
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "third", start_time, end_time, "y"),
},
{
ResourceName: "google_bigquery_data_transfer_config.query_config",
Expand All @@ -54,17 +58,22 @@ func testAccBigqueryDataTransferConfig_scheduledQuery_basic(t *testing.T) {

func testAccBigqueryDataTransferConfig_scheduledQuery_update(t *testing.T) {
random_suffix := randString(t, 10)
now := time.Now().UTC()
first_start_time := now.Add(1 * time.Hour).Format(time.RFC3339)
first_end_time := now.AddDate(0, 1, 0).Format(time.RFC3339)
second_start_time := now.Add(2 * time.Hour).Format(time.RFC3339)
second_end_time := now.AddDate(0, 2, 0).Format(time.RFC3339)

vcrTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckBigqueryDataTransferConfigDestroyProducer(t),
Steps: []resource.TestStep{
{
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "first", "y"),
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "first", first_start_time, first_end_time, "y"),
},
{
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "second", "z"),
Config: testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, "second", second_start_time, second_end_time, "z"),
},
{
ResourceName: "google_bigquery_data_transfer_config.query_config",
Expand Down Expand Up @@ -145,7 +154,7 @@ func testAccCheckBigqueryDataTransferConfigDestroyProducer(t *testing.T) func(s
}
}

func testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, schedule, letter string) string {
func testAccBigqueryDataTransferConfig_scheduledQuery(random_suffix, schedule, start_time, end_time, letter string) string {
return fmt.Sprintf(`
data "google_project" "project" {}
Expand Down Expand Up @@ -175,6 +184,11 @@ resource "google_bigquery_data_transfer_config" "query_config" {
location = "asia-northeast1"
data_source_id = "scheduled_query"
schedule = "%s sunday of quarter 00:00"
schedule_options {
disable_auto_scheduling = false
start_time = "%s"
end_time = "%s"
}
destination_dataset_id = google_bigquery_dataset.my_dataset.dataset_id
notification_pubsub_topic = google_pubsub_topic.my_topic.id
params = {
Expand All @@ -183,7 +197,7 @@ resource "google_bigquery_data_transfer_config" "query_config" {
query = "SELECT name FROM tabl WHERE x = '%s'"
}
}
`, random_suffix, random_suffix, random_suffix, schedule, letter)
`, random_suffix, random_suffix, random_suffix, schedule, start_time, end_time, letter)
}

func testAccBigqueryDataTransferConfig_scheduledQuery_service_account(random_suffix string) string {
Expand Down
30 changes: 30 additions & 0 deletions website/docs/r/bigquery_data_transfer_config.html.markdown
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,11 @@ The following arguments are supported:
https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
NOTE: the granularity should be at least 8 hours, or less frequent.

* `schedule_options` -
(Optional)
Options customizing the data transfer schedule.
Structure is documented below.

* `notification_pubsub_topic` -
(Optional)
Pub/Sub topic where notifications will be sent after transfer runs
Expand Down Expand Up @@ -150,6 +155,31 @@ The following arguments are supported:
If it is not provided, the provider project is used.


The `schedule_options` block supports:

* `disable_auto_scheduling` -
(Optional)
If true, automatic scheduling of data transfer runs for this
configuration will be disabled. The runs can be started on ad-hoc
basis using transferConfigs.startManualRuns API. When automatic
scheduling is disabled, the TransferConfig.schedule field will
be ignored.

* `start_time` -
(Optional)
Specifies time to start scheduling transfer runs. The first run will be
scheduled at or after the start time according to a recurrence pattern
defined in the schedule string. The start time can be changed at any
moment. The time when a data transfer can be triggered manually is not
limited by this option.

* `end_time` -
(Optional)
Defines time to stop scheduling transfer runs. A transfer run cannot be
scheduled at or after the end time. The end time can be changed at any
moment. The time when a data transfer can be triggered manually is not
limited by this option.

The `sensitive_params` block supports:

* `secret_access_key` -
Expand Down

0 comments on commit 8ddaaa1

Please sign in to comment.