diff --git a/blueprints/data-solutions/gcs-to-bq-with-least-privileges/README.md b/blueprints/data-solutions/gcs-to-bq-with-least-privileges/README.md
index 1d3f939743..915ada213d 100644
--- a/blueprints/data-solutions/gcs-to-bq-with-least-privileges/README.md
+++ b/blueprints/data-solutions/gcs-to-bq-with-least-privileges/README.md
@@ -145,13 +145,13 @@ Once this is done, the 3 files necessary to run the Dataflow Job will have been
 Run the following command to start the dataflow job:
 
-    gcloud --impersonate-service-account=orchestrator@$SERVICE_PROJECT_ID.iam.gserviceaccount.com dataflow jobs run test_batch_01 \
+    gcloud --impersonate-service-account=orchestrator@$SERVICE_PROJECT_ID.iam.gserviceaccount.com dataflow jobs run test_batch_01 \
     --gcs-location gs://dataflow-templates/latest/GCS_Text_to_BigQuery \
     --project $SERVICE_PROJECT_ID \
     --region europe-west1 \
     --disable-public-ips \
     --subnetwork https://www.googleapis.com/compute/v1/projects/$SERVICE_PROJECT_ID/regions/europe-west1/subnetworks/subnet \
-    --staging-location gs://$PREFIX-df-tmp\
+    --staging-location gs://$PREFIX-df-tmp \
     --service-account-email df-loading@$SERVICE_PROJECT_ID.iam.gserviceaccount.com \
     --parameters \
     javascriptTextTransformFunctionName=transform,\