diff --git a/google/resource_dataflow_job.go b/google/resource_dataflow_job.go
index 330f11ab290..a4d5f04128c 100644
--- a/google/resource_dataflow_job.go
+++ b/google/resource_dataflow_job.go
@@ -54,6 +54,12 @@ func resourceDataflowJob() *schema.Resource {
 				ForceNew: true,
 			},
 
+			"machine_type": &schema.Schema{
+				Type:     schema.TypeString,
+				Optional: true,
+				ForceNew: true,
+			},
+
 			"max_workers": &schema.Schema{
 				Type:     schema.TypeInt,
 				Optional: true,
@@ -106,6 +112,7 @@ func resourceDataflowJobCreate(d *schema.ResourceData, meta interface{}) error {
 		TempLocation: d.Get("temp_gcs_location").(string),
 		Zone:         zone,
 		MaxWorkers:   int64(d.Get("max_workers").(int)),
+		MachineType:  d.Get("machine_type").(string),
 	}
 
 	request := dataflow.CreateJobFromTemplateRequest{
diff --git a/google/resource_dataflow_job_test.go b/google/resource_dataflow_job_test.go
index bd13019b738..d4ce59433ba 100644
--- a/google/resource_dataflow_job_test.go
+++ b/google/resource_dataflow_job_test.go
@@ -86,6 +86,7 @@ resource "google_dataflow_job" "big_data" {
 	}
 	zone = "us-central1-f"
 	project = "%s"
+	machine_type = "n1-standard-1"
 
 	on_delete = "cancel"
 }`, acctest.RandString(10), acctest.RandString(10), getTestProjectFromEnv())
diff --git a/website/docs/r/dataflow_job.html.markdown b/website/docs/r/dataflow_job.html.markdown
index 7543ee4f511..028273e968f 100644
--- a/website/docs/r/dataflow_job.html.markdown
+++ b/website/docs/r/dataflow_job.html.markdown
@@ -49,6 +49,7 @@ The following arguments are supported:
 * `on_delete` - (Optional) One of "drain" or "cancel". Specifies behavior of deletion during `terraform destroy`. See above note.
 * `project` - (Optional) The project in which the resource belongs. If it is not provided, the provider project is used.
 * `zone` - (Optional) The zone in which the created job should run. If it is not provided, the provider zone is used.
+* `machine_type` - (Optional) The machine type for Google Compute Engine instances used in your pipeline execution. E.g., n1-standard-1.
 
 ## Attributes Reference
 
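For context, a minimal configuration exercising the new argument might look like the sketch below. It is not part of the diff: the job name, bucket paths, template, and parameter values are placeholders, and only the `machine_type` line depends on this change.

```hcl
resource "google_dataflow_job" "big_data" {
  name              = "dataflow-job-example"                    # placeholder name
  template_gcs_path = "gs://my-bucket/templates/template_file"  # placeholder template path
  temp_gcs_location = "gs://my-bucket/tmp_dir"                  # placeholder temp location
  machine_type      = "n1-standard-1"                           # worker machine type added by this change
  max_workers       = 2
  zone              = "us-central1-f"
  on_delete         = "cancel"

  parameters {
    foo = "bar"                                                  # placeholder template parameters
  }
}
```

Note that changing `machine_type` forces a new job, since the field is declared with `ForceNew: true` in the schema above.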