From 6fe89d6fb4a09d1d666cfe1cb788b2a2dddc1c29 Mon Sep 17 00:00:00 2001
From: Modular Magician
Date: Mon, 18 May 2020 16:50:04 +0000
Subject: [PATCH] allow bigquery job fields that reference resources in blocks
 to do so with one field (#3519)

Signed-off-by: Modular Magician
---
 .changelog/3519.txt                           |   3 +
 google/resource_big_query_job.go              | 467 +++++++-----------
 .../resource_big_query_job_generated_test.go  | 358 ++++++++++++++
 website/docs/r/bigquery_job.html.markdown     |  86 +++-
 4 files changed, 621 insertions(+), 293 deletions(-)
 create mode 100644 .changelog/3519.txt

diff --git a/.changelog/3519.txt b/.changelog/3519.txt
new file mode 100644
index 00000000000..3e33eb44d14
--- /dev/null
+++ b/.changelog/3519.txt
@@ -0,0 +1,3 @@
+```release-note:enhancement
+bigquery: added ability for various `table_id` fields (and one `dataset_id` field) in `google_bigquery_job` to specify a relative path instead of just the table id
+```
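For orientation before the implementation below, this is roughly what the release note means in configuration terms — a minimal sketch with hypothetical project, dataset, and table names, not an excerpt from the provider's documentation:

```hcl
resource "google_bigquery_job" "example" {
  job_id = "example_query_job"

  query {
    query = "SELECT 1"

    destination_table {
      # Bare-ID form: project_id and dataset_id are set alongside table_id.
      project_id = "my-project"
      dataset_id = "my_dataset"
      table_id   = "my_table"
    }
  }
}

# Equivalent relative-path form, e.g. what google_bigquery_table.foo.id yields:
#
#   destination_table {
#     table_id = "projects/my-project/datasets/my_dataset/tables/my_table"
#   }
```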
diff --git a/google/resource_big_query_job.go b/google/resource_big_query_job.go
index b7e4993621a..383890c98d0 100644
--- a/google/resource_big_query_job.go
+++ b/google/resource_big_query_job.go
@@ -18,6 +18,7 @@ import (
   "fmt"
   "log"
   "reflect"
+  "regexp"
   "strconv"
   "time"
 
@@ -26,6 +27,11 @@ import (
   "google.golang.org/api/googleapi"
 )
 
+var (
+  bigqueryDatasetRegexp = regexp.MustCompile("projects/(.+)/datasets/(.+)")
+  bigqueryTableRegexp   = regexp.MustCompile("projects/(.+)/datasets/(.+)/tables/(.+)")
+)
+
 func resourceBigQueryJob() *schema.Resource {
   return &schema.Resource{
     Create: resourceBigQueryJobCreate,
@@ -57,24 +63,27 @@ func resourceBigQueryJob() *schema.Resource {
             Description: `Source tables to copy.`,
             Elem: &schema.Resource{
               Schema: map[string]*schema.Schema{
+                "table_id": {
+                  Type:     schema.TypeString,
+                  Required: true,
+                  ForceNew: true,
+                  Description: `The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.`,
+                },
                 "dataset_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the dataset containing this table.`,
                 },
                 "project_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the project containing this table.`,
                 },
-                "table_id": {
-                  Type:     schema.TypeString,
-                  Required: true,
-                  ForceNew: true,
-                  Description: `The ID of the table.`,
-                },
               },
             },
           },
@@ -115,24 +124,27 @@ The BigQuery Service Account associated with your project requires access to thi
               MaxItems: 1,
               Elem: &schema.Resource{
                 Schema: map[string]*schema.Schema{
+                  "table_id": {
+                    Type:     schema.TypeString,
+                    Required: true,
+                    ForceNew: true,
+                    Description: `The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.`,
+                  },
                   "dataset_id": {
                     Type:     schema.TypeString,
-                    Required: true,
+                    Computed: true,
+                    Optional: true,
                     ForceNew: true,
                     Description: `The ID of the dataset containing this table.`,
                   },
                   "project_id": {
                     Type:     schema.TypeString,
-                    Required: true,
+                    Computed: true,
+                    Optional: true,
                     ForceNew: true,
                     Description: `The ID of the project containing this table.`,
                   },
-                  "table_id": {
-                    Type:     schema.TypeString,
-                    Required: true,
-                    ForceNew: true,
-                    Description: `The ID of the table.`,
-                  },
                 },
               },
             },
@@ -240,24 +252,27 @@ Default is ','`,
             MaxItems: 1,
             Elem: &schema.Resource{
               Schema: map[string]*schema.Schema{
+                "table_id": {
+                  Type:     schema.TypeString,
+                  Required: true,
+                  ForceNew: true,
+                  Description: `The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.`,
+                },
                 "dataset_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the dataset containing this table.`,
                 },
                 "project_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the project containing this table.`,
                 },
-                "table_id": {
-                  Type:     schema.TypeString,
-                  Required: true,
-                  ForceNew: true,
-                  Description: `The ID of the table.`,
-                },
               },
             },
             ExactlyOneOf: []string{},
@@ -301,24 +316,27 @@ Default is ','`,
             MaxItems: 1,
             Elem: &schema.Resource{
              Schema: map[string]*schema.Schema{
+                "table_id": {
+                  Type:     schema.TypeString,
+                  Required: true,
+                  ForceNew: true,
+                  Description: `The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.`,
+                },
                 "dataset_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the dataset containing this table.`,
                 },
                 "project_id": {
                   Type:     schema.TypeString,
-                  Required: true,
+                  Computed: true,
+                  Optional: true,
                   ForceNew: true,
                   Description: `The ID of the project containing this table.`,
                 },
-                "table_id": {
-                  Type:     schema.TypeString,
-                  Required: true,
-                  ForceNew: true,
-                  Description: `The ID of the table.`,
-                },
               },
             },
           },
@@ -588,13 +606,15 @@ Creation, truncation and append actions occur as one atomic update upon job comp
             Elem: &schema.Resource{
               Schema: map[string]*schema.Schema{
                 "dataset_id": {
-                  Type:        schema.TypeString,
-                  Required:    true,
-                  ForceNew:    true,
-                  Description: `A unique ID for this dataset, without the project name.`,
+                  Type:     schema.TypeString,
+                  Required: true,
+                  ForceNew: true,
+                  Description: `The dataset. Can be specified as '{{dataset_id}}' if 'project_id' is also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}' if not.`,
                 },
                 "project_id": {
                   Type:     schema.TypeString,
+                  Computed: true,
                   Optional: true,
                   ForceNew: true,
                   Description: `The ID of the project containing this table.`,
@@ -630,24 +650,27 @@ For queries that produce anonymous (cached) results, this field will be populate
               MaxItems: 1,
               Elem: &schema.Resource{
                 Schema: map[string]*schema.Schema{
+                  "table_id": {
+                    Type:     schema.TypeString,
+                    Required: true,
+                    ForceNew: true,
+                    Description: `The table. Can be specified as '{{table_id}}' if 'project_id' and 'dataset_id' are also set,
+or of the form 'projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}' if not.`,
+                  },
                   "dataset_id": {
                     Type:     schema.TypeString,
-                    Required: true,
+                    Computed: true,
+                    Optional: true,
                     ForceNew: true,
                     Description: `The ID of the dataset containing this table.`,
                   },
                   "project_id": {
                     Type:     schema.TypeString,
-                    Required: true,
+                    Computed: true,
+                    Optional: true,
                     ForceNew: true,
                     Description: `The ID of the project containing this table.`,
                   },
-                  "table_id": {
-                    Type:     schema.TypeString,
-                    Required: true,
-                    ForceNew: true,
-                    Description: `The ID of the table.`,
-                  },
                 },
               },
             },
@@ -1058,24 +1081,15 @@ func flattenBigQueryJobConfigurationQueryDestinationTable(v interface{}, d *sche
     return nil
   }
   transformed := make(map[string]interface{})
-  transformed["project_id"] =
-    flattenBigQueryJobConfigurationQueryDestinationTableProjectId(original["projectId"], d, config)
-  transformed["dataset_id"] =
-    flattenBigQueryJobConfigurationQueryDestinationTableDatasetId(original["datasetId"], d, config)
-  transformed["table_id"] =
-    flattenBigQueryJobConfigurationQueryDestinationTableTableId(original["tableId"], d, config)
-  return []interface{}{transformed}
-}
-func flattenBigQueryJobConfigurationQueryDestinationTableProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
-
-func flattenBigQueryJobConfigurationQueryDestinationTableDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+  transformed["project_id"] = original["projectId"]
+  transformed["dataset_id"] = original["datasetId"]
+  transformed["table_id"] = original["tableId"]
 
-func flattenBigQueryJobConfigurationQueryDestinationTableTableId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  if bigqueryTableRegexp.MatchString(d.Get("query.0.destination_table.0.table_id").(string)) {
+    // The user specified the table_id as a URL, so store it in state that way
+    transformed["table_id"] = fmt.Sprintf("projects/%s/datasets/%s/tables/%s", transformed["project_id"], transformed["dataset_id"], transformed["table_id"])
+  }
+  return []interface{}{transformed}
 }
 
 func flattenBigQueryJobConfigurationQueryUserDefinedFunctionResources(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1122,18 +1136,14 @@ func flattenBigQueryJobConfigurationQueryDefaultDataset(v interface{}, d *schema
     return nil
   }
   transformed := make(map[string]interface{})
-  transformed["dataset_id"] =
-    flattenBigQueryJobConfigurationQueryDefaultDatasetDatasetId(original["datasetId"], d, config)
-  transformed["project_id"] =
-    flattenBigQueryJobConfigurationQueryDefaultDatasetProjectId(original["projectId"], d, config)
-  return []interface{}{transformed}
-}
-func flattenBigQueryJobConfigurationQueryDefaultDatasetDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+  transformed["project_id"] = original["projectId"]
+  transformed["dataset_id"] = original["datasetId"]
 
-func flattenBigQueryJobConfigurationQueryDefaultDatasetProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  if bigqueryDatasetRegexp.MatchString(d.Get("query.0.default_dataset.0.dataset_id").(string)) {
+    // The user specified the dataset_id as a URL, so store it in state that way
+    transformed["dataset_id"] = fmt.Sprintf("projects/%s/datasets/%s", transformed["project_id"], transformed["dataset_id"])
+  }
+  return []interface{}{transformed}
 }
 
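Each flatten function in this patch applies the same rule: the API always returns bare component IDs, and the provider re-assembles the long form only when the user's configuration used it, so state matches config in either spelling. A self-contained sketch of that behavior (illustrative only; `flattenTableID` and `tableRegexp` are invented names, not provider code):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the provider's bigqueryTableRegexp.
var tableRegexp = regexp.MustCompile("projects/(.+)/datasets/(.+)/tables/(.+)")

// flattenTableID mirrors the flatten logic above: `configured` is the value
// currently in the user's config, while project/dataset/table are the bare
// IDs the API returned.
func flattenTableID(configured, project, dataset, table string) string {
	if tableRegexp.MatchString(configured) {
		// Config used the relative-path form; rebuild it so state agrees.
		return fmt.Sprintf("projects/%s/datasets/%s/tables/%s", project, dataset, table)
	}
	// Config used the bare ID; keep state in that form too.
	return table
}

func main() {
	fmt.Println(flattenTableID("projects/p/datasets/d/tables/t", "p", "d", "t"))
	fmt.Println(flattenTableID("t", "p", "d", "t"))
}
```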
 func flattenBigQueryJobConfigurationQueryPriority(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1293,24 +1303,15 @@ func flattenBigQueryJobConfigurationLoadDestinationTable(v interface{}, d *schem
     return nil
   }
   transformed := make(map[string]interface{})
-  transformed["project_id"] =
-    flattenBigQueryJobConfigurationLoadDestinationTableProjectId(original["projectId"], d, config)
-  transformed["dataset_id"] =
-    flattenBigQueryJobConfigurationLoadDestinationTableDatasetId(original["datasetId"], d, config)
-  transformed["table_id"] =
-    flattenBigQueryJobConfigurationLoadDestinationTableTableId(original["tableId"], d, config)
-  return []interface{}{transformed}
-}
-func flattenBigQueryJobConfigurationLoadDestinationTableProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
-
-func flattenBigQueryJobConfigurationLoadDestinationTableDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+  transformed["project_id"] = original["projectId"]
+  transformed["dataset_id"] = original["datasetId"]
+  transformed["table_id"] = original["tableId"]
 
-func flattenBigQueryJobConfigurationLoadDestinationTableTableId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  if bigqueryTableRegexp.MatchString(d.Get("load.0.destination_table.0.table_id").(string)) {
+    // The user specified the table_id as a URL, so store it in state that way
+    transformed["table_id"] = fmt.Sprintf("projects/%s/datasets/%s/tables/%s", transformed["project_id"], transformed["dataset_id"], transformed["table_id"])
+  }
+  return []interface{}{transformed}
 }
 
 func flattenBigQueryJobConfigurationLoadCreateDisposition(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1472,30 +1473,26 @@ func flattenBigQueryJobConfigurationCopySourceTables(v interface{}, d *schema.Re
   }
   l := v.([]interface{})
   transformed := make([]interface{}, 0, len(l))
-  for _, raw := range l {
+  for i, raw := range l {
     original := raw.(map[string]interface{})
     if len(original) < 1 {
       // Do not include empty json objects coming back from the api
       continue
     }
-    transformed = append(transformed, map[string]interface{}{
-      "project_id": flattenBigQueryJobConfigurationCopySourceTablesProjectId(original["projectId"], d, config),
-      "dataset_id": flattenBigQueryJobConfigurationCopySourceTablesDatasetId(original["datasetId"], d, config),
-      "table_id":   flattenBigQueryJobConfigurationCopySourceTablesTableId(original["tableId"], d, config),
-    })
-  }
-  return transformed
-}
-func flattenBigQueryJobConfigurationCopySourceTablesProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+    t := map[string]interface{}{
+      "project_id": original["projectId"],
+      "dataset_id": original["datasetId"],
+      "table_id":   original["tableId"],
+    }
 
-func flattenBigQueryJobConfigurationCopySourceTablesDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+    if bigqueryTableRegexp.MatchString(d.Get(fmt.Sprintf("copy.0.source_tables.%d.table_id", i)).(string)) {
+      // The user specified the table_id as a URL, so store it in state that way
+      t["table_id"] = fmt.Sprintf("projects/%s/datasets/%s/tables/%s", t["project_id"], t["dataset_id"], t["table_id"])
+    }
+    transformed = append(transformed, t)
+  }
 
-func flattenBigQueryJobConfigurationCopySourceTablesTableId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  return transformed
 }
 
 func flattenBigQueryJobConfigurationCopyDestinationTable(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1507,24 +1504,15 @@ func flattenBigQueryJobConfigurationCopyDestinationTable(v interface{}, d *schem
     return nil
   }
   transformed := make(map[string]interface{})
-  transformed["project_id"] =
-    flattenBigQueryJobConfigurationCopyDestinationTableProjectId(original["projectId"], d, config)
-  transformed["dataset_id"] =
-    flattenBigQueryJobConfigurationCopyDestinationTableDatasetId(original["datasetId"], d, config)
-  transformed["table_id"] =
-    flattenBigQueryJobConfigurationCopyDestinationTableTableId(original["tableId"], d, config)
-  return []interface{}{transformed}
-}
-func flattenBigQueryJobConfigurationCopyDestinationTableProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
-
-func flattenBigQueryJobConfigurationCopyDestinationTableDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+  transformed["project_id"] = original["projectId"]
+  transformed["dataset_id"] = original["datasetId"]
+  transformed["table_id"] = original["tableId"]
 
-func flattenBigQueryJobConfigurationCopyDestinationTableTableId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  if bigqueryTableRegexp.MatchString(d.Get("copy.0.destination_table.0.table_id").(string)) {
+    // The user specified the table_id as a URL, so store it in state that way
+    transformed["table_id"] = fmt.Sprintf("projects/%s/datasets/%s/tables/%s", transformed["project_id"], transformed["dataset_id"], transformed["table_id"])
+  }
+  return []interface{}{transformed}
 }
 
 func flattenBigQueryJobConfigurationCopyCreateDisposition(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1612,24 +1600,15 @@ func flattenBigQueryJobConfigurationExtractSourceTable(v interface{}, d *schema.
     return nil
   }
   transformed := make(map[string]interface{})
-  transformed["project_id"] =
-    flattenBigQueryJobConfigurationExtractSourceTableProjectId(original["projectId"], d, config)
-  transformed["dataset_id"] =
-    flattenBigQueryJobConfigurationExtractSourceTableDatasetId(original["datasetId"], d, config)
-  transformed["table_id"] =
-    flattenBigQueryJobConfigurationExtractSourceTableTableId(original["tableId"], d, config)
-  return []interface{}{transformed}
-}
-func flattenBigQueryJobConfigurationExtractSourceTableProjectId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
-
-func flattenBigQueryJobConfigurationExtractSourceTableDatasetId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
-}
+  transformed["project_id"] = original["projectId"]
+  transformed["dataset_id"] = original["datasetId"]
+  transformed["table_id"] = original["tableId"]
 
-func flattenBigQueryJobConfigurationExtractSourceTableTableId(v interface{}, d *schema.ResourceData, config *Config) interface{} {
-  return v
+  if bigqueryTableRegexp.MatchString(d.Get("extract.0.source_table.0.table_id").(string)) {
+    // The user specified the table_id as a URL, so store it in state that way
+    transformed["table_id"] = fmt.Sprintf("projects/%s/datasets/%s/tables/%s", transformed["project_id"], transformed["dataset_id"], transformed["table_id"])
+  }
+  return []interface{}{transformed}
 }
 
 func flattenBigQueryJobConfigurationExtractSourceModel(v interface{}, d *schema.ResourceData, config *Config) interface{} {
@@ -1901,40 +1880,28 @@ func expandBigQueryJobConfigurationQueryDestinationTable(v interface{}, d Terraf
   original := raw.(map[string]interface{})
   transformed := make(map[string]interface{})
-  transformedProjectId, err := expandBigQueryJobConfigurationQueryDestinationTableProjectId(original["project_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+  transformedProjectId := original["project_id"]
+  if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
     transformed["projectId"] = transformedProjectId
   }
 
-  transformedDatasetId, err := expandBigQueryJobConfigurationQueryDestinationTableDatasetId(original["dataset_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+  transformedDatasetId := original["dataset_id"]
+  if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
     transformed["datasetId"] = transformedDatasetId
   }
 
-  transformedTableId, err := expandBigQueryJobConfigurationQueryDestinationTableTableId(original["table_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
+  transformedTableId := original["table_id"]
+  if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
     transformed["tableId"] = transformedTableId
   }
 
-  return transformed, nil
-}
-
-func expandBigQueryJobConfigurationQueryDestinationTableProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationQueryDestinationTableDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
+  if parts := bigqueryTableRegexp.FindStringSubmatch(transformedTableId.(string)); parts != nil {
+    transformed["projectId"] = parts[1]
+    transformed["datasetId"] = parts[2]
+    transformed["tableId"] = parts[3]
+  }
 
-func expandBigQueryJobConfigurationQueryDestinationTableTableId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
+  return transformed, nil
 }
 
 func expandBigQueryJobConfigurationQueryUserDefinedFunctionResources(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
@@ -1991,31 +1958,24 @@ func expandBigQueryJobConfigurationQueryDefaultDataset(v interface{}, d Terrafor
   original := raw.(map[string]interface{})
   transformed := make(map[string]interface{})
-  transformedDatasetId, err := expandBigQueryJobConfigurationQueryDefaultDatasetDatasetId(original["dataset_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+  transformedProjectId := original["project_id"]
+  if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+    transformed["projectId"] = transformedProjectId
+  }
+
+  transformedDatasetId := original["dataset_id"]
+  if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
    transformed["datasetId"] = transformedDatasetId
   }
 
-  transformedProjectId, err := expandBigQueryJobConfigurationQueryDefaultDatasetProjectId(original["project_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
-    transformed["projectId"] = transformedProjectId
+  if parts := bigqueryDatasetRegexp.FindStringSubmatch(transformedDatasetId.(string)); parts != nil {
+    transformed["projectId"] = parts[1]
+    transformed["datasetId"] = parts[2]
   }
 
   return transformed, nil
 }
 
-func expandBigQueryJobConfigurationQueryDefaultDatasetDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationQueryDefaultDatasetProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
 func expandBigQueryJobConfigurationQueryPriority(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
   return v, nil
 }
@@ -2278,40 +2238,28 @@ func expandBigQueryJobConfigurationLoadDestinationTable(v interface{}, d Terrafo
   original := raw.(map[string]interface{})
   transformed := make(map[string]interface{})
-  transformedProjectId, err := expandBigQueryJobConfigurationLoadDestinationTableProjectId(original["project_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+  transformedProjectId := original["project_id"]
+  if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
    transformed["projectId"] = transformedProjectId
   }
 
-  transformedDatasetId, err := expandBigQueryJobConfigurationLoadDestinationTableDatasetId(original["dataset_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+  transformedDatasetId := original["dataset_id"]
+  if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
    transformed["datasetId"] = transformedDatasetId
   }
 
-  transformedTableId, err := expandBigQueryJobConfigurationLoadDestinationTableTableId(original["table_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
+  transformedTableId := original["table_id"]
+  if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
     transformed["tableId"] = transformedTableId
   }
 
-  return transformed, nil
-}
-
-func expandBigQueryJobConfigurationLoadDestinationTableProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationLoadDestinationTableDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
+  if parts := bigqueryTableRegexp.FindStringSubmatch(transformedTableId.(string)); parts != nil {
+    transformed["projectId"] = parts[1]
+    transformed["datasetId"] = parts[2]
+    transformed["tableId"] = parts[3]
+  }
 
-func expandBigQueryJobConfigurationLoadDestinationTableTableId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
+  return transformed, nil
 }
 
 func expandBigQueryJobConfigurationLoadCreateDisposition(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
@@ -2499,44 +2447,33 @@ func expandBigQueryJobConfigurationCopySourceTables(v interface{}, d TerraformRe
     original := raw.(map[string]interface{})
     transformed := make(map[string]interface{})
-    transformedProjectId, err := expandBigQueryJobConfigurationCopySourceTablesProjectId(original["project_id"], d, config)
-    if err != nil {
-      return nil, err
-    } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+    transformedProjectId := original["project_id"]
+    if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
       transformed["projectId"] = transformedProjectId
     }
 
-    transformedDatasetId, err := expandBigQueryJobConfigurationCopySourceTablesDatasetId(original["dataset_id"], d, config)
-    if err != nil {
-      return nil, err
-    } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+    transformedDatasetId := original["dataset_id"]
+    if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
      transformed["datasetId"] = transformedDatasetId
     }
 
-    transformedTableId, err := expandBigQueryJobConfigurationCopySourceTablesTableId(original["table_id"], d, config)
-    if err != nil {
-      return nil, err
-    } else if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
+    transformedTableId := original["table_id"]
+    if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
      transformed["tableId"] = transformedTableId
     }
 
+    if parts := bigqueryTableRegexp.FindStringSubmatch(transformedTableId.(string)); parts != nil {
+      transformed["projectId"] = parts[1]
+      transformed["datasetId"] = parts[2]
+      transformed["tableId"] = parts[3]
+    }
+
     req = append(req, transformed)
   }
   return req, nil
 }
 
-func expandBigQueryJobConfigurationCopySourceTablesProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationCopySourceTablesDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationCopySourceTablesTableId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
 func expandBigQueryJobConfigurationCopyDestinationTable(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
   l := v.([]interface{})
   if len(l) == 0 || l[0] == nil {
@@ -2546,40 +2483,28 @@ func expandBigQueryJobConfigurationCopyDestinationTable(v interface{}, d Terrafo
   original := raw.(map[string]interface{})
   transformed := make(map[string]interface{})
-  transformedProjectId, err := expandBigQueryJobConfigurationCopyDestinationTableProjectId(original["project_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+  transformedProjectId := original["project_id"]
+  if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
    transformed["projectId"] = transformedProjectId
   }
 
-  transformedDatasetId, err := expandBigQueryJobConfigurationCopyDestinationTableDatasetId(original["dataset_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+  transformedDatasetId := original["dataset_id"]
+  if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
    transformed["datasetId"] = transformedDatasetId
   }
 
-  transformedTableId, err := expandBigQueryJobConfigurationCopyDestinationTableTableId(original["table_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
+  transformedTableId := original["table_id"]
+  if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
    transformed["tableId"] = transformedTableId
   }
 
-  return transformed, nil
-}
-
-func expandBigQueryJobConfigurationCopyDestinationTableProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationCopyDestinationTableDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
+  if parts := bigqueryTableRegexp.FindStringSubmatch(transformedTableId.(string)); parts != nil {
+    transformed["projectId"] = parts[1]
+    transformed["datasetId"] = parts[2]
+    transformed["tableId"] = parts[3]
+  }
 
-func expandBigQueryJobConfigurationCopyDestinationTableTableId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
+  return transformed, nil
 }
 
 func expandBigQueryJobConfigurationCopyCreateDisposition(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
@@ -2714,40 +2639,28 @@ func expandBigQueryJobConfigurationExtractSourceTable(v interface{}, d Terraform
   original := raw.(map[string]interface{})
   transformed := make(map[string]interface{})
-  transformedProjectId, err := expandBigQueryJobConfigurationExtractSourceTableProjectId(original["project_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
+  transformedProjectId := original["project_id"]
+  if val := reflect.ValueOf(transformedProjectId); val.IsValid() && !isEmptyValue(val) {
    transformed["projectId"] = transformedProjectId
   }
 
-  transformedDatasetId, err := expandBigQueryJobConfigurationExtractSourceTableDatasetId(original["dataset_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
+  transformedDatasetId := original["dataset_id"]
+  if val := reflect.ValueOf(transformedDatasetId); val.IsValid() && !isEmptyValue(val) {
    transformed["datasetId"] = transformedDatasetId
   }
 
-  transformedTableId, err := expandBigQueryJobConfigurationExtractSourceTableTableId(original["table_id"], d, config)
-  if err != nil {
-    return nil, err
-  } else if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
+  transformedTableId := original["table_id"]
+  if val := reflect.ValueOf(transformedTableId); val.IsValid() && !isEmptyValue(val) {
    transformed["tableId"] = transformedTableId
   }
 
-  return transformed, nil
-}
-
-func expandBigQueryJobConfigurationExtractSourceTableProjectId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
-
-func expandBigQueryJobConfigurationExtractSourceTableDatasetId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
-}
+  if parts := bigqueryTableRegexp.FindStringSubmatch(transformedTableId.(string)); parts != nil {
+    transformed["projectId"] = parts[1]
+    transformed["datasetId"] = parts[2]
+    transformed["tableId"] = parts[3]
+  }
 
-func expandBigQueryJobConfigurationExtractSourceTableTableId(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
-  return v, nil
+  return transformed, nil
 }
 
 func expandBigQueryJobConfigurationExtractSourceModel(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
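The expand side is the mirror image of the flatten side: whichever form the configuration holds, the API request always receives the three component IDs, with a relative-path `table_id` overriding any separately supplied parts. A standalone sketch under the same caveats (`parseTableReference` and `tableRegexp` are invented names, not provider code):

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as the provider's bigqueryTableRegexp.
var tableRegexp = regexp.MustCompile("projects/(.+)/datasets/(.+)/tables/(.+)")

// parseTableReference normalizes a configured table reference into the three
// bare IDs the BigQuery API expects. If table is the long form, its embedded
// components win over any project/dataset supplied separately.
func parseTableReference(project, dataset, table string) (string, string, string) {
	if parts := tableRegexp.FindStringSubmatch(table); parts != nil {
		// Relative-path form: parts[1..3] are project, dataset, table.
		return parts[1], parts[2], parts[3]
	}
	// Bare-ID form: fall back to the separately supplied fields.
	return project, dataset, table
}

func main() {
	fmt.Println(parseTableReference("", "", "projects/p/datasets/d/tables/t"))
	fmt.Println(parseTableReference("p", "d", "t"))
}
```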
"random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryJob_bigqueryJobLoadTableReferenceExample(context), + }, + { + ResourceName: "google_bigquery_job.job", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "load.0.destination_table.0.table_id"}, + }, + }, + }) +} + +func testAccBigQueryJob_bigqueryJobLoadTableReferenceExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_bigquery_table" "foo" { + dataset_id = google_bigquery_dataset.bar.dataset_id + table_id = "tf_test_job_load%{random_suffix}_table" +} + +resource "google_bigquery_dataset" "bar" { + dataset_id = "tf_test_job_load%{random_suffix}_dataset" + friendly_name = "test" + description = "This is a test description" + location = "US" +} + +resource "google_bigquery_job" "job" { + job_id = "tf_test_job_load%{random_suffix}" + + labels = { + "my_job" ="load" + } + + load { + source_uris = [ + "gs://cloud-samples-data/bigquery/us-states/us-states-by-date.csv", + ] + + destination_table { + table_id = google_bigquery_table.foo.id + } + + skip_leading_rows = 1 + schema_update_options = ["ALLOW_FIELD_RELAXATION", "ALLOW_FIELD_ADDITION"] + + write_disposition = "WRITE_APPEND" + autodetect = true + } +} +`, context) +} + func TestAccBigQueryJob_bigqueryJobCopyExample(t *testing.T) { t.Parallel() @@ -303,6 +434,152 @@ resource "google_bigquery_job" "job" { `, context) } +func TestAccBigQueryJob_bigqueryJobCopyTableReferenceExample(t *testing.T) { + t.Parallel() + + context := map[string]interface{}{ + "project": getTestProjectFromEnv(), + "random_suffix": randString(t, 10), + } + + vcrTest(t, resource.TestCase{ + PreCheck: func() { testAccPreCheck(t) }, + Providers: testAccProviders, + Steps: []resource.TestStep{ + { + Config: testAccBigQueryJob_bigqueryJobCopyTableReferenceExample(context), + }, + { + ResourceName: "google_bigquery_job.job", + ImportState: true, + ImportStateVerify: true, + ImportStateVerifyIgnore: []string{"etag", "copy.0.destination_table.0.table_id", "copy.0.source_tables.0.table_id", "copy.0.source_tables.1.table_id"}, + }, + }, + }) +} + +func testAccBigQueryJob_bigqueryJobCopyTableReferenceExample(context map[string]interface{}) string { + return Nprintf(` +resource "google_bigquery_table" "source" { + count = length(google_bigquery_dataset.source) + + dataset_id = google_bigquery_dataset.source[count.index].dataset_id + table_id = "tf_test_job_copy%{random_suffix}_${count.index}_table" + + schema = < + + Open in Cloud Shell + + +## Example Usage - Bigquery Job Query Table Reference + + +```hcl +resource "google_bigquery_table" "foo" { + dataset_id = google_bigquery_dataset.bar.dataset_id + table_id = "job_query_table" +} + +resource "google_bigquery_dataset" "bar" { + dataset_id = "job_query_dataset" + friendly_name = "test" + description = "This is a test description" + location = "US" +} + +resource "google_bigquery_job" "job" { + job_id = "job_query" + + labels = { + "example-label" ="example-value" + } + + query { + query = "SELECT state FROM [lookerdata:cdc.project_tycho_reports]" + + destination_table { + table_id = google_bigquery_table.foo.id + } + + default_dataset { + dataset_id = google_bigquery_dataset.bar.id + } + + allow_large_results = true + flatten_results = true + + script_options { + key_result_statement = "LAST" + } + } +} +```
@@ -442,16 +490,17 @@ The `query` block supports:
 The `destination_table` block supports:
 
 * `project_id` -
-  (Required)
+  (Optional)
   The ID of the project containing this table.
 
 * `dataset_id` -
-  (Required)
+  (Optional)
   The ID of the dataset containing this table.
 
 * `table_id` -
   (Required)
-  The ID of the table.
+  The table. Can be specified as `{{table_id}}` if `project_id` and `dataset_id` are also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}` if not.
 
 The `user_defined_function_resources` block supports:
 
@@ -468,7 +517,8 @@ The `default_dataset` block supports:
 
 * `dataset_id` -
   (Required)
-  A unique ID for this dataset, without the project name.
+  The dataset. Can be specified as `{{dataset_id}}` if `project_id` is also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}` if not.
 
 * `project_id` -
   (Optional)
@@ -643,16 +693,17 @@ The `load` block supports:
 The `destination_table` block supports:
 
 * `project_id` -
-  (Required)
+  (Optional)
   The ID of the project containing this table.
 
 * `dataset_id` -
-  (Required)
+  (Optional)
   The ID of the dataset containing this table.
 
 * `table_id` -
   (Required)
-  The ID of the table.
+  The table. Can be specified as `{{table_id}}` if `project_id` and `dataset_id` are also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}` if not.
 
 The `time_partitioning` block supports:
 
@@ -723,30 +774,32 @@ The `copy` block supports:
 The `source_tables` block supports:
 
 * `project_id` -
-  (Required)
+  (Optional)
   The ID of the project containing this table.
 
 * `dataset_id` -
-  (Required)
+  (Optional)
   The ID of the dataset containing this table.
 
 * `table_id` -
   (Required)
-  The ID of the table.
+  The table. Can be specified as `{{table_id}}` if `project_id` and `dataset_id` are also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}` if not.
 
 The `destination_table` block supports:
 
 * `project_id` -
-  (Required)
+  (Optional)
   The ID of the project containing this table.
 
 * `dataset_id` -
-  (Required)
+  (Optional)
   The ID of the dataset containing this table.
 
 * `table_id` -
   (Required)
-  The ID of the table.
+  The table. Can be specified as `{{table_id}}` if `project_id` and `dataset_id` are also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}` if not.
 
 The `destination_encryption_configuration` block supports:
 
@@ -797,16 +850,17 @@ The `extract` block supports:
 The `source_table` block supports:
 
 * `project_id` -
-  (Required)
+  (Optional)
   The ID of the project containing this table.
 
 * `dataset_id` -
-  (Required)
+  (Optional)
   The ID of the dataset containing this table.
 
 * `table_id` -
   (Required)
-  The ID of the table.
+  The table. Can be specified as `{{table_id}}` if `project_id` and `dataset_id` are also set,
+  or of the form `projects/{{project}}/datasets/{{dataset_id}}/tables/{{table_id}}` if not.
 
 The `source_model` block supports: