Transfer BigQuery dataset to k8s-infra-public-pii
Create a BigQuery scheduled job running every 24 hours to copy the dataset riaan_data_store
from the project k8s-infra-ii-sandbox into the dataset k8s_infra_artifacts_gcslogs.

Signed-off-by: Arnaud Meukam <[email protected]>
ameukam committed Jul 28, 2021
1 parent ea5fcb4 commit 1d7ea74
Showing 1 changed file with 34 additions and 0 deletions.
34 changes: 34 additions & 0 deletions infra/gcp/clusters/projects/k8s-infra-public-pii/bigquery.tf
@@ -0,0 +1,34 @@

// Service account dedicated to BigQuery Data Transfer
resource "google_service_account" "bq_data_transfer_writer" {
  account_id  = "bq-data-transfer"
  description = "Service Account for BigQuery Data Transfer"
}

// Grant the BigQuery dataEditor role to the service account so that the scheduled transfer can run
resource "google_project_iam_member" "bq_data_transfer_writer_binding" {
  project = google_project.project.project_id
  role    = "roles/bigquery.dataEditor"
  member  = "serviceAccount:${google_service_account.bq_data_transfer_writer.email}"
}

resource "google_bigquery_data_transfer_config" "bq_data_transfer" {
display_name = "BigQuey data transfer to ${google_bigquery_dataset.audit-logs-gcs.dataset_id}"
project = google_project.project.project_id
data_source_id = "k8s-infra-ii-sandbox.riaan_data_store"
schedule = "every 24 hours" #Times are in UTC
destination_dataset_id = google_bigquery_dataset.audit-logs-gcs.dataset_id
service_account_name = google_service_account.bq_data_transfer_writer.email

params = {
write_disposition = "WRITE_TRUNCATE" #Overwrite existing data
}

schedule_options {
start_time = "15h00" #in UTC
}

email_preferences {
enable_failure_email = true
}
}
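
The transfer config above references google_bigquery_dataset.audit-logs-gcs and google_project.project, which are defined elsewhere in this project's Terraform. For context, a minimal sketch of what the destination dataset could look like, assuming it matches the commit message; the location and description are illustrative assumptions, not the repository's actual definition:

// Hypothetical sketch of the destination dataset referenced above; the real
// resource lives elsewhere in this module. dataset_id follows the commit
// message; location and description are assumed.
resource "google_bigquery_dataset" "audit-logs-gcs" {
  dataset_id  = "k8s_infra_artifacts_gcslogs"
  project     = google_project.project.project_id
  location    = "US" // assumed
  description = "GCS access logs for k8s-infra artifacts" // assumed
}

Once applied, the scheduled copy appears under "Data transfers" in the BigQuery section of the Cloud Console, and failed runs send an email because enable_failure_email is set.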
