diff --git a/mmv1/products/bigquery/api.yaml b/mmv1/products/bigquery/api.yaml index f32f9e8f8014..166dfb6bdddd 100644 --- a/mmv1/products/bigquery/api.yaml +++ b/mmv1/products/bigquery/api.yaml @@ -725,7 +725,8 @@ objects: description: | The format of the data files. For CSV files, specify "CSV". For datastore backups, specify "DATASTORE_BACKUP". For newline-delimited JSON, specify "NEWLINE_DELIMITED_JSON". For Avro, specify "AVRO". For parquet, specify "PARQUET". - For orc, specify "ORC". The default value is CSV. + For orc, specify "ORC". [Beta] For Bigtable, specify "BIGTABLE". + The default value is CSV. default_value: 'CSV' - !ruby/object:Api::Type::Boolean name: 'allowJaggedRows' diff --git a/mmv1/third_party/terraform/resources/resource_bigquery_table.go b/mmv1/third_party/terraform/resources/resource_bigquery_table.go index c9ea365ab218..4927593005c7 100644 --- a/mmv1/third_party/terraform/resources/resource_bigquery_table.go +++ b/mmv1/third_party/terraform/resources/resource_bigquery_table.go @@ -389,7 +389,7 @@ func resourceBigQueryTable() *schema.Resource { Required: true, Description: `The data format. Supported values are: "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC" and "DATASTORE_BACKUP". To use "GOOGLE_SHEETS" the scopes must include "googleapis.com/auth/drive.readonly".`, ValidateFunc: validation.StringInSlice([]string{ - "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "DATASTORE_BACKUP", "PARQUET", "ORC", + "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "DATASTORE_BACKUP", "PARQUET", "ORC", "BIGTABLE", }, false), }, // SourceURIs [Required] The fully-qualified URIs that point to your data in Google Cloud. 
diff --git a/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go b/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go index 41ff303be758..cc3c93520c48 100644 --- a/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go +++ b/mmv1/third_party/terraform/tests/resource_bigquery_table_test.go @@ -425,6 +425,32 @@ func TestAccBigQueryExternalDataTable_CSV(t *testing.T) { }) } +func TestAccBigQueryDataTable_bigtable(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "random_suffix": randString(t, 8), + "project": getTestProjectFromEnv(), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + CheckDestroy: testAccCheckBigQueryTableDestroyProducer(t), + Steps: []resource.TestStep{ + { + Config: testAccBigQueryTableFromBigtable(context), + }, + { + ResourceName: "google_bigquery_table.table", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"deletion_protection"}, + }, + }, + }) +} + func TestAccBigQueryDataTable_sheet(t *testing.T) { t.Parallel() @@ -1406,6 +1432,53 @@ resource "google_bigquery_table" "test" { `, datasetID, bucketName, objectName, content, tableID, format, quoteChar) } +func testAccBigQueryTableFromBigtable(context map[string]interface{}) string { + return Nprintf(` + resource "google_bigtable_instance" "instance" { + name = "tf-test-bigtable-inst-%{random_suffix}" + cluster { + cluster_id = "tf-test-bigtable-%{random_suffix}" + zone = "us-central1-b" + } + instance_type = "DEVELOPMENT" + deletion_protection = false + } + resource "google_bigtable_table" "table" { + name = "%{random_suffix}" + instance_name = google_bigtable_instance.instance.name + column_family { + family = "cf-%{random_suffix}-first" + } + column_family { + family = "cf-%{random_suffix}-second" + } + } + resource "google_bigquery_table" "table" { + deletion_protection = false + dataset_id = google_bigquery_dataset.dataset.dataset_id + 
table_id = "tf_test_bigtable_%{random_suffix}" + external_data_configuration { + autodetect = true + source_format = "BIGTABLE" + ignore_unknown_values = true + source_uris = [ + "https://googleapis.com/bigtable/${google_bigtable_table.table.id}", + ] + } + } + resource "google_bigquery_dataset" "dataset" { + dataset_id = "tf_test_ds_%{random_suffix}" + friendly_name = "test" + description = "This is a test description" + location = "EU" + default_table_expiration_ms = 3600000 + labels = { + env = "default" + } + } +`, context) +} + func testAccBigQueryTableFromSheet(context map[string]interface{}) string { return Nprintf(` resource "google_bigquery_table" "table" { diff --git a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown index 036940cd14bb..5c563cc97221 100644 --- a/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown +++ b/mmv1/third_party/terraform/website/docs/r/bigquery_table.html.markdown @@ -186,8 +186,8 @@ The `external_data_configuration` block supports: `google_bigquery_table.schema` * `source_format` (Required) - The data format. Supported values are: - "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC" - and "DATASTORE_BACKUP". To use "GOOGLE_SHEETS" + "CSV", "GOOGLE_SHEETS", "NEWLINE_DELIMITED_JSON", "AVRO", "PARQUET", "ORC", + "DATASTORE_BACKUP", and "BIGTABLE". To use "GOOGLE_SHEETS" the `scopes` must include "https://www.googleapis.com/auth/drive.readonly".