Revert "add ip_configuration to dataflow_job (GoogleCloudPlatform#2504)"
This reverts commit 638f2f2.
JanMa authored Oct 25, 2019
1 parent 6288680 commit f7d66e1
Showing 5 changed files with 2 additions and 56 deletions.
2 changes: 1 addition & 1 deletion build/terraform
2 changes: 1 addition & 1 deletion build/terraform-beta
8 changes: 0 additions & 8 deletions third_party/terraform/resources/resource_dataflow_job.go
@@ -122,13 +122,6 @@ func resourceDataflowJob() *schema.Resource {
				Optional: true,
				ForceNew: true,
			},

			"ip_configuration": {
				Type:         schema.TypeString,
				Optional:     true,
				ForceNew:     true,
				ValidateFunc: validation.StringInSlice([]string{"WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE", ""}, false),
			},
		},
	}
}
@@ -161,7 +154,6 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error {
		Subnetwork:           d.Get("subnetwork").(string),
		TempLocation:         d.Get("temp_gcs_location").(string),
		MachineType:          d.Get("machine_type").(string),
		IpConfiguration:      d.Get("ip_configuration").(string),
		AdditionalUserLabels: labels,
		Zone:                 zone,
	}
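For reference, a minimal stand-alone Go sketch (standard library only, not code from this commit or the provider) of the rule the removed ValidateFunc expressed: ip_configuration had to be "WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE", or left unset.

package main

import "fmt"

// allowedIPConfigurations mirrors the values the removed ValidateFunc
// accepted for ip_configuration: public worker IPs, private worker IPs,
// or the empty string (field unset).
var allowedIPConfigurations = []string{"WORKER_IP_PUBLIC", "WORKER_IP_PRIVATE", ""}

// checkIPConfiguration reports whether v would have passed the removed
// validation.StringInSlice check (an exact, case-sensitive match).
func checkIPConfiguration(v string) error {
	for _, allowed := range allowedIPConfigurations {
		if v == allowed {
			return nil
		}
	}
	return fmt.Errorf("ip_configuration must be one of %q, got %q", allowedIPConfigurations, v)
}

func main() {
	for _, v := range []string{"WORKER_IP_PRIVATE", "WORKER_IP_PUBLIC", "", "worker_ip_private"} {
		fmt.Printf("%q -> %v\n", v, checkIPConfiguration(v))
	}
}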
45 changes: 0 additions & 45 deletions third_party/terraform/tests/resource_dataflow_job_test.go
@@ -131,24 +131,6 @@ func TestAccDataflowJobCreateWithLabels(t *testing.T) {
	})
}

func TestAccDataflowJobCreateWithIpConfig(t *testing.T) {
	t.Parallel()
	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		Providers:    testAccProviders,
		CheckDestroy: testAccCheckDataflowJobDestroy,
		Steps: []resource.TestStep{
			{
				Config: testAccDataflowJobWithIpConfig,
				Check: resource.ComposeTestCheckFunc(
					testAccDataflowJobExists(
						"google_dataflow_job.big_data"),
				),
			},
		},
	})
}

func testAccCheckDataflowJobDestroy(s *terraform.State) error {
	for _, rs := range s.RootModule().Resources {
		if rs.Type != "google_dataflow_job" {
@@ -537,33 +519,6 @@ resource "google_dataflow_job" "big_data" {
  on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())

var testAccDataflowJobWithIpConfig = fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
  name = "dfjob-test-%s-temp"
  force_destroy = true
}
resource "google_dataflow_job" "big_data" {
  name = "dfjob-test-%s"
  template_gcs_path = "gs://dataflow-templates/wordcount/template_file"
  temp_gcs_location = "${google_storage_bucket.temp.url}"
  machine_type = "n1-standard-2"
  parameters = {
    inputFile = "gs://dataflow-samples/shakespeare/kinglear.txt"
    output = "${google_storage_bucket.temp.url}/output"
  }
  ip_configuration = "WORKER_IP_PRIVATE"
  zone = "us-central1-f"
  project = "%s"
  on_delete = "cancel"
}`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())

func testAccDataflowJobWithLabels(key string) string {
	return fmt.Sprintf(`
resource "google_storage_bucket" "temp" {
@@ -54,7 +54,6 @@ The following arguments are supported:
* `network` - (Optional) The network to which VMs will be assigned. If it is not provided, "default" will be used.
* `subnetwork` - (Optional) The subnetwork to which VMs will be assigned. Should be of the form "regions/REGION/subnetworks/SUBNETWORK".
* `machine_type` - (Optional) The machine type to use for the job.
* `ip_configuration` - (Optional) The configuration for VM IPs. Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.


## Attributes Reference
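For context on what the removed argument controlled downstream, a hedged sketch assuming the google.golang.org/api/dataflow/v1b3 client library (the request and environment types below come from that client; the literal values are hypothetical and not taken from this commit): before the revert, the provider copied ip_configuration into the RuntimeEnvironment sent with the template launch request.

package main

import (
	"fmt"

	dataflow "google.golang.org/api/dataflow/v1b3"
)

func main() {
	// Hypothetical values standing in for what the provider read from
	// d.Get(...) when building the job's runtime environment.
	env := &dataflow.RuntimeEnvironment{
		TempLocation:    "gs://example-bucket/temp", // hypothetical bucket
		MachineType:     "n1-standard-2",
		Zone:            "us-central1-f",
		IpConfiguration: "WORKER_IP_PRIVATE", // the knob this commit removes from the resource
	}

	req := &dataflow.CreateJobFromTemplateRequest{
		JobName: "dfjob-example", // hypothetical job name
		GcsPath: "gs://dataflow-templates/wordcount/template_file",
		Parameters: map[string]string{
			"inputFile": "gs://dataflow-samples/shakespeare/kinglear.txt",
			"output":    "gs://example-bucket/temp/output",
		},
		Environment: env,
	}

	fmt.Printf("worker IP configuration in the launch request: %s\n", req.Environment.IpConfiguration)
}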
