Add force_destroy option to bigquery dataset #2986

Merged: 6 commits merged on Feb 8, 2019
13 changes: 12 additions & 1 deletion google/resource_bigquery_dataset.go
@@ -184,6 +184,16 @@ func resourceBigQueryDataset() *schema.Resource {
},
},

// Delete Contents on Destroy: [Optional] If True, delete all the tables in the dataset.
// If False and the dataset contains tables, the request will fail.
// Default is False.
"delete_contents_on_destroy": {
Type: schema.TypeBool,
Optional: true,
Default: false,
DiffSuppressFunc: emptyOrDefaultStringSuppress("false"),
},

// SelfLink: [Output-only] A URL that can be used to access the resource
// again. You can use this URL in Get or Update requests to the
// resource.
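
(Note: a minimal Terraform configuration exercising the new field might look like the sketch below; the resource name and dataset ID are illustrative, not taken from this PR.)

resource "google_bigquery_dataset" "example" {
  dataset_id = "example_dataset"
  location   = "US"

  # With this set, `terraform destroy` deletes any tables in the dataset
  # before deleting the dataset itself; with the default (false), destroy
  # fails while the dataset still contains tables.
  delete_contents_on_destroy = true
}
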
@@ -407,7 +417,8 @@ func resourceBigQueryDatasetDelete(d *schema.ResourceData, meta interface{}) error {
return err
}

if err := config.clientBigQuery.Datasets.Delete(id.Project, id.DatasetId).Do(); err != nil {
deleteContents := d.Get("delete_contents_on_destroy").(bool)
if err := config.clientBigQuery.Datasets.Delete(id.Project, id.DatasetId).DeleteContents(deleteContents).Do(); err != nil {
return err
}

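(Note: for readers unfamiliar with the google.golang.org/api/bigquery/v2 client used here, the sketch below shows the equivalent standalone call; the wrapper function and its names are illustrative, not part of this PR. DeleteContents(true) maps to the API's deleteContents query parameter, which drops the dataset's tables before the dataset itself is deleted.)

// deleteDatasetWithContents is an illustrative helper, assuming an authenticated
// *bigquery.Service from google.golang.org/api/bigquery/v2.
func deleteDatasetWithContents(svc *bigquery.Service, project, datasetID string) error {
	return svc.Datasets.Delete(project, datasetID).DeleteContents(true).Do()
}
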
63 changes: 63 additions & 0 deletions google/resource_bigquery_dataset_test.go
@@ -7,6 +7,7 @@ import (
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
"google.golang.org/api/bigquery/v2"
)

func TestAccBigQueryDataset_basic(t *testing.T) {
@@ -39,6 +40,31 @@ func TestAccBigQueryDataset_basic(t *testing.T) {
})
}

func TestAccBigQueryDataset_datasetWithContents(t *testing.T) {
t.Parallel()

datasetID := fmt.Sprintf("tf_test_%s", acctest.RandString(10))
tableID := fmt.Sprintf("tf_test_%s", acctest.RandString(10))

resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckBigQueryDatasetDestroy,
Steps: []resource.TestStep{
{
Config: testAccBigQueryDatasetDeleteContents(datasetID),
Check: testAccAddTable(datasetID, tableID),
},
{
ResourceName: "google_bigquery_dataset.contents_test",
ImportState: true,
ImportStateVerify: true,
ImportStateVerifyIgnore: []string{"delete_contents_on_destroy"},
},
},
})
}

func TestAccBigQueryDataset_access(t *testing.T) {
t.Parallel()

@@ -180,6 +206,25 @@ func testAccCheckBigQueryDatasetDestroy(s *terraform.State) error {
return nil
}

func testAccAddTable(datasetID string, tableID string) resource.TestCheckFunc {
// Not actually a check, but adds a table independently of terraform
return func(s *terraform.State) error {
config := testAccProvider.Meta().(*Config)
table := &bigquery.Table{
TableReference: &bigquery.TableReference{
DatasetId: datasetID,
TableId: tableID,
ProjectId: config.Project,
},
}
_, err := config.clientBigQuery.Tables.Insert(config.Project, datasetID, table).Do()
if err != nil {
return fmt.Errorf("could not create table %q: %v", tableID, err)
}
return nil
}
}

func testAccBigQueryDataset(datasetID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {
@@ -214,6 +259,24 @@ resource "google_bigquery_dataset" "test" {
}`, datasetID)
}

func testAccBigQueryDatasetDeleteContents(datasetID string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "contents_test" {
dataset_id = "%s"
friendly_name = "foo"
description = "This is a foo description"
location = "EU"
default_partition_expiration_ms = 3600000
default_table_expiration_ms = 3600000
delete_contents_on_destroy = true

labels = {
env = "foo"
default_table_expiration_ms = 3600000
}
}`, datasetID)
}

func testAccBigQueryRegionalDataset(datasetID string, location string) string {
return fmt.Sprintf(`
resource "google_bigquery_dataset" "test" {