From 70156afdaf7aa253d651b7bfca46174c5399c257 Mon Sep 17 00:00:00 2001
From: Modular Magician
Date: Thu, 25 Jun 2020 19:43:56 +0000
Subject: [PATCH] Add hourly partitioning to bigquery table (upstream PR) (#3707)

* Add TODO for checking the go library for HOUR support, update description and validation value.

* Remove TODO from code and run gofmt.

* Test table creation with HOUR instead of DAY.

* Update google/resource_bigquery_table.go

* Add testAccBigQueryTableHourlyTimePartitioning test.

Co-authored-by: fpopic
Signed-off-by: Modular Magician
---
 .changelog/3707.txt                    |  3 +
 google/resource_bigquery_table.go      |  8 +--
 google/resource_bigquery_table_test.go | 92 +++++++++++++++++++++++++-
 3 files changed, 97 insertions(+), 6 deletions(-)
 create mode 100644 .changelog/3707.txt

diff --git a/.changelog/3707.txt b/.changelog/3707.txt
new file mode 100644
index 00000000000..a43775c5dbe
--- /dev/null
+++ b/.changelog/3707.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+bigquery: Added `"HOUR"` option for `google_bigquery_table` time partitioning (`type`)
+```

diff --git a/google/resource_bigquery_table.go b/google/resource_bigquery_table.go
index 7e723671616..01e92e4e85c 100644
--- a/google/resource_bigquery_table.go
+++ b/google/resource_bigquery_table.go
@@ -336,13 +336,13 @@ func resourceBigQueryTable() *schema.Resource {
 							Description: `Number of milliseconds for which to keep the storage for a partition.`,
 						},
 
-						// Type: [Required] The only type supported is DAY, which will generate
-						// one partition per day based on data loading time.
+						// Type: [Required] The supported types are DAY and HOUR, which will generate
+						// one partition per day or hour based on data loading time.
 						"type": {
 							Type:         schema.TypeString,
 							Required:     true,
-							Description:  `The only type supported is DAY, which will generate one partition per day based on data loading time.`,
-							ValidateFunc: validation.StringInSlice([]string{"DAY"}, false),
+							Description:  `The supported types are DAY and HOUR, which will generate one partition per day or hour based on data loading time.`,
+							ValidateFunc: validation.StringInSlice([]string{"DAY", "HOUR"}, false),
 						},
 
 						// Field: [Optional] The field used to determine how to create a time-based
diff --git a/google/resource_bigquery_table_test.go b/google/resource_bigquery_table_test.go
index eb665179e22..5a6228d3f3b 100644
--- a/google/resource_bigquery_table_test.go
+++ b/google/resource_bigquery_table_test.go
@@ -20,7 +20,7 @@ func TestAccBigQueryTable_Basic(t *testing.T) {
 		CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
 		Steps: []resource.TestStep{
 			{
-				Config: testAccBigQueryTable(datasetID, tableID),
+				Config: testAccBigQueryTableDailyTimePartitioning(datasetID, tableID),
 			},
 			{
 				ResourceName:      "google_bigquery_table.test",
@@ -64,6 +64,37 @@ func TestAccBigQueryTable_Kms(t *testing.T) {
 	})
 }
 
+func TestAccBigQueryTable_HourlyTimePartitioning(t *testing.T) {
+	t.Parallel()
+
+	datasetID := fmt.Sprintf("tf_test_%s", randString(t, 10))
+	tableID := fmt.Sprintf("tf_test_%s", randString(t, 10))
+
+	vcrTest(t, resource.TestCase{
+		PreCheck:     func() { testAccPreCheck(t) },
+		Providers:    testAccProviders,
+		CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t),
+		Steps: []resource.TestStep{
+			{
+				Config: testAccBigQueryTableHourlyTimePartitioning(datasetID, tableID),
+			},
+			{
+				ResourceName:      "google_bigquery_table.test",
+				ImportState:       true,
+				ImportStateVerify: true,
+			},
+			{
+				Config: testAccBigQueryTableUpdated(datasetID, tableID),
+			},
+			{
+				ResourceName:      "google_bigquery_table.test",
+				ImportState:       true,
+				ImportStateVerify: true,
+			},
+		},
+	})
+}
+
 func TestAccBigQueryTable_HivePartitioning(t *testing.T) {
 	t.Parallel()
 	bucketName := testBucketName(t)
@@ -261,7 +292,7 @@ func testAccCheckBigQueryTableDestroyProducer(t *testing.T) func(s *terraform.St
 	}
 }
 
-func testAccBigQueryTable(datasetID, tableID string) string {
+func testAccBigQueryTableDailyTimePartitioning(datasetID, tableID string) string {
 	return fmt.Sprintf(`
 resource "google_bigquery_dataset" "test" {
   dataset_id = "%s"
@@ -318,6 +349,63 @@ EOH
 `, datasetID, tableID)
 }
 
+func testAccBigQueryTableHourlyTimePartitioning(datasetID, tableID string) string {
+	return fmt.Sprintf(`
+resource "google_bigquery_dataset" "test" {
+  dataset_id = "%s"
+}
+
+resource "google_bigquery_table" "test" {
+  table_id   = "%s"
+  dataset_id = google_bigquery_dataset.test.dataset_id
+
+  time_partitioning {
+    type                     = "HOUR"
+    field                    = "ts"
+    require_partition_filter = true
+  }
+  clustering = ["some_int", "some_string"]
+  schema     = <<EOH
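For context, the user-facing effect of this patch: a `google_bigquery_table` resource can now declare `type = "HOUR"` in its `time_partitioning` block, where previously the provider's `ValidateFunc` rejected everything except `"DAY"` at plan time. A minimal configuration sketch follows; the resource names, IDs, and the single `ts` column are illustrative, not taken from the patch:

```hcl
resource "google_bigquery_dataset" "example" {
  # Hypothetical dataset ID, for illustration only.
  dataset_id = "example_dataset"
}

resource "google_bigquery_table" "example" {
  dataset_id = google_bigquery_dataset.example.dataset_id
  table_id   = "example_table"

  # With this patch, "HOUR" passes schema validation alongside "DAY",
  # producing one partition per hour of the "ts" column.
  time_partitioning {
    type  = "HOUR"
    field = "ts"
  }

  schema = <<EOF
[
  {
    "name": "ts",
    "type": "TIMESTAMP",
    "mode": "NULLABLE"
  }
]
EOF
}
```

Because the allowed values live in `validation.StringInSlice`, an unsupported partitioning type still fails during `terraform plan` rather than surfacing as a BigQuery API error at apply time.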