fixed folder-level fallback policy retrieval
kwadie committed Apr 19, 2023
1 parent dd51677 commit e1fe45c
Showing 7 changed files with 171 additions and 160 deletions.
[File 1 of 7]
@@ -31,6 +31,7 @@
 import com.google.cloud.pso.bq_snapshot_manager.services.backup_policy.BackupPolicyService;
 import com.google.cloud.pso.bq_snapshot_manager.services.backup_policy.BackupPolicyServiceFireStoreImpl;
 import com.google.cloud.pso.bq_snapshot_manager.services.pubsub.PubSubServiceImpl;
+import com.google.cloud.pso.bq_snapshot_manager.services.scan.ResourceScannerImpl;
 import com.google.cloud.pso.bq_snapshot_manager.services.set.GCSPersistentSetImpl;
 import com.google.gson.Gson;
 import org.springframework.boot.SpringApplication;
@@ -126,6 +127,7 @@ public ResponseEntity receiveMessage(@RequestBody PubSubEvent requestBody) {
                 new BigQueryServiceImpl(configuratorRequest.getTargetTable().getProject()),
                 backupPolicyService,
                 new PubSubServiceImpl(),
+                new ResourceScannerImpl(),
                 new GCSPersistentSetImpl(environment.getGcsFlagsBucket()),
                 fallbackBackupPolicy,
                 "configurator-flags",
[File 2 of 7]
@@ -16,6 +16,7 @@
 import com.google.cloud.pso.bq_snapshot_manager.services.backup_policy.BackupPolicyService;
 import com.google.cloud.pso.bq_snapshot_manager.services.pubsub.PubSubPublishResults;
 import com.google.cloud.pso.bq_snapshot_manager.services.pubsub.PubSubService;
+import com.google.cloud.pso.bq_snapshot_manager.services.scan.ResourceScanner;
 import com.google.cloud.pso.bq_snapshot_manager.services.set.PersistentSet;
 import org.springframework.scheduling.support.CronExpression;
 
@@ -35,6 +36,8 @@ public class Configurator {
 
     private final BackupPolicyService backupPolicyService;
     private final PubSubService pubSubService;
+
+    private final ResourceScanner resourceScanner;
     private final PersistentSet persistentSet;
     private final FallbackBackupPolicy fallbackBackupPolicy;
     private final String persistentSetObjectPrefix;
@@ -44,6 +47,7 @@ public Configurator(ConfiguratorConfig config,
                         BigQueryService bqService,
                         BackupPolicyService backupPolicyService,
                         PubSubService pubSubService,
+                        ResourceScanner resourceScanner,
                         PersistentSet persistentSet,
                         FallbackBackupPolicy fallbackBackupPolicy,
                         String persistentSetObjectPrefix,
@@ -52,6 +56,7 @@ public Configurator(ConfiguratorConfig config,
         this.bqService = bqService;
         this.backupPolicyService = backupPolicyService;
         this.pubSubService = pubSubService;
+        this.resourceScanner = resourceScanner;
         this.persistentSet = persistentSet;
         this.fallbackBackupPolicy = fallbackBackupPolicy;
         this.persistentSetObjectPrefix = persistentSetObjectPrefix;
@@ -241,8 +246,16 @@ public BackupPolicy getBackupPolicy(ConfiguratorRequest request) throws IOException
         );
 
         // find the most granular fallback policy: table > dataset > project
-        BackupPolicy fallbackPolicy = findFallbackBackupPolicy(fallbackBackupPolicy,
-                request.getTargetTable()).y();
+        Tuple<String, BackupPolicy> fallbackBackupPolicyTuple = findFallbackBackupPolicy(
+                fallbackBackupPolicy,
+                request.getTargetTable());
+        BackupPolicy fallbackPolicy = fallbackBackupPolicyTuple.y();
+
+        logger.logInfoWithTracker(request.isDryRun(),
+                request.getTrackingId(),
+                request.getTargetTable(),
+                String.format("Will use a %s-level fallback policy", fallbackBackupPolicyTuple.x())
+        );
 
         // if there is a system-attached policy, use only its last_xyz fields and take the rest from the latest fallback policy
         // last_backup_at must be checked to determine whether to take a backup in this run
@@ -391,9 +404,9 @@ public Tuple<SnapshoterRequest, SnapshoterRequest> prepareSnapshotRequests(BackupPolicy
         return Tuple.of(bqSnapshotRequest, gcsSnapshotRequest);
     }
 
-    public static Tuple<String, BackupPolicy> findFallbackBackupPolicy(FallbackBackupPolicy
+    public Tuple<String, BackupPolicy> findFallbackBackupPolicy(FallbackBackupPolicy
                                                                        fallbackBackupPolicy,
-                                                                       TableSpec tableSpec) {
+                                                                       TableSpec tableSpec) throws IOException {
 
         BackupPolicy tableLevel = fallbackBackupPolicy.getTableOverrides().get(tableSpec.toSqlString());
         if (tableLevel != null) {
@@ -414,9 +427,16 @@ public static Tuple<String, BackupPolicy> findFallbackBackupPolicy(FallbackBackupPolicy
             return Tuple.of("project", projectLevel);
         }
 
-        //TODO: check for folder level by getting the folder id of a project from the API
+        // API call: look up the project's parent folder id
+        String folderId = resourceScanner.getParentFolderId(tableSpec.getProject());
+        if (folderId != null) {
+            BackupPolicy folderLevel = fallbackBackupPolicy.getFolderOverrides().get(folderId);
+            if (folderLevel != null) {
+                return Tuple.of("folder", folderLevel);
+            }
+        }
 
         // else return the global default policy
-        return Tuple.of("global", fallbackBackupPolicy.getDefaultPolicy());
+        return Tuple.of("default", fallbackBackupPolicy.getDefaultPolicy());
     }
 }
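To make the new lookup order concrete, here is a minimal, self-contained sketch of the table > dataset > project > folder > default resolution that findFallbackBackupPolicy now implements. The keys and policy values are hypothetical, and plain strings stand in for the repo's BackupPolicy type:

import java.util.HashMap;
import java.util.Map;

public class FallbackResolutionSketch {
    public static void main(String[] args) {
        // Override maps keyed by resource, mirroring FallbackBackupPolicy's overrides
        Map<String, String> tableOverrides = new HashMap<>();
        Map<String, String> datasetOverrides = new HashMap<>();
        Map<String, String> projectOverrides = new HashMap<>();
        Map<String, String> folderOverrides = new HashMap<>();
        folderOverrides.put("123456", "folder-policy");

        String table = "p.d.t", dataset = "p.d", project = "p";
        String parentFolderId = "123456"; // in the real code: resourceScanner.getParentFolderId(project)

        String policy;
        if (tableOverrides.containsKey(table)) {
            policy = tableOverrides.get(table);            // most granular wins
        } else if (datasetOverrides.containsKey(dataset)) {
            policy = datasetOverrides.get(dataset);
        } else if (projectOverrides.containsKey(project)) {
            policy = projectOverrides.get(project);
        } else if (parentFolderId != null && folderOverrides.containsKey(parentFolderId)) {
            policy = folderOverrides.get(parentFolderId);  // the new folder-level step
        } else {
            policy = "default-policy";                     // global default
        }

        System.out.println(policy); // prints "folder-policy"
    }
}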
[File 3 of 7]
@@ -33,4 +33,6 @@ public interface ResourceScanner {
 
     // list tables under a project/dataset in the format "project.dataset.table"
     List<String> listTables(String project, String dataset) throws InterruptedException, NonRetryableApplicationException;
+
+    String getParentFolderId(String project) throws IOException;
 }
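Callers of this interface can be unit-tested without hitting the Resource Manager API. A hypothetical test double, assuming Mockito is on the test classpath and the same package imports as the interface (the repository's actual test setup may differ):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

class ResourceScannerStubExample {
    ResourceScanner stubbedScanner() throws Exception {
        ResourceScanner scanner = mock(ResourceScanner.class);
        // Project directly under a folder: return the bare folder id
        when(scanner.getParentFolderId("folder-project")).thenReturn("123456");
        // Project under an organization (or with no parent): null
        when(scanner.getParentFolderId("org-project")).thenReturn(null);
        return scanner;
    }
}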
[File 4 of 7]
@@ -33,6 +33,7 @@
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
+
 import com.google.api.services.cloudresourcemanager.v3.CloudResourceManager;
 import com.google.api.services.iam.v1.IamScopes;
 
@@ -50,30 +51,27 @@ public ResourceScannerImpl() throws IOException, GeneralSecurityException {
     @Override
     public List<String> listTables(String projectId, String datasetId) {
         return StreamSupport.stream(bqService.listTables(DatasetId.of(projectId, datasetId)).iterateAll().spliterator(),
-                false)
+                        false)
                 .filter(t -> t.getDefinition().getType().equals(TableDefinition.Type.TABLE))
                 .map(t -> String.format("%s.%s.%s", projectId, datasetId, t.getTableId().getTable()))
                 .collect(Collectors.toCollection(ArrayList::new));
     }
 
-
-
     @Override
     public List<String> listDatasets(String projectId) {
         return StreamSupport.stream(bqService.listDatasets(projectId)
-                .iterateAll()
-                .spliterator(),
-                false)
+                        .iterateAll()
+                        .spliterator(),
+                        false)
                 .map(d -> String.format("%s.%s", projectId, d.getDatasetId().getDataset()))
                 .collect(Collectors.toCollection(ArrayList::new));
     }
 
-
     @Override
     public List<String> listProjects(Long folderId) throws IOException {
 
         List<Project> projects = cloudResourceManager.projects().list()
-                .setParent("folders/"+folderId)
+                .setParent("folders/" + folderId)
                 .execute()
                 .getProjects();
 
@@ -84,6 +82,30 @@ public List<String> listProjects(Long folderId) throws IOException {
 
     }
 
+    /**
+     * Returns the folder id of the direct parent of a project.
+     * If the project doesn't have a parent folder, it returns null.
+     * If the project doesn't exist, it throws an exception.
+     */
+    @Override
+    public String getParentFolderId(String project) throws IOException {
+        String parentFolder = cloudResourceManager
+                .projects()
+                .get(String.format("projects/%s", project))
+                .execute()
+                .getParent();
+
+        if (parentFolder == null) {
+            return null;
+        }
+        if (parentFolder.startsWith("folders/")) {
+            // the API returns "folders/<folder_id>"; return only the id part
+            return parentFolder.substring(8);
+        }
+        // in all other cases, e.g. the parent being organizations/xyz, return null
+        return null;
+    }
+
     public static CloudResourceManager createCloudResourceManagerService()
             throws IOException, GeneralSecurityException {
         // Use the Application Default Credentials strategy for authentication. For more info, see:
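The parent-parsing rule above is small enough to check in isolation. A standalone sketch (no API calls; the resource names are hypothetical examples of the values the Resource Manager API can return):

public class ParentParsingSketch {
    static String parseFolderId(String parent) {
        if (parent != null && parent.startsWith("folders/")) {
            return parent.substring("folders/".length()); // "folders/123" -> "123"
        }
        return null; // no parent, or parent is organizations/<id>
    }

    public static void main(String[] args) {
        System.out.println(parseFolderId("folders/123456789"));       // 123456789
        System.out.println(parseFolderId("organizations/987654321")); // null
        System.out.println(parseFolderId(null));                      // null
    }
}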
[File 5 of 7]
@@ -23,8 +23,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -35,8 +35,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -46,8 +46,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -59,8 +59,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -70,8 +70,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -83,8 +83,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -94,8 +94,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -107,8 +107,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -118,8 +118,8 @@ public void testParsing() throws JsonProcessingException {
         " \"backup_method\": \"BigQuery Snapshot\",\n" +
         " \"backup_time_travel_offset_days\": \"0\",\n" +
         " \"bq_snapshot_expiration_days\": \"15\",\n" +
-        " \"backup_storage_project\": \"project\",\n" +
-        " \"backup_operation_project\": \"project\",\n" +
+        " \"backup_storage_project\": \"storage_project\",\n" +
+        " \"backup_operation_project\": \"operation_project\",\n" +
         " \"bq_snapshot_storage_dataset\": \"dataset\",\n" +
         " \"gcs_snapshot_storage_location\": \"gs://bla/\",\n" +
         " \"config_source\": \"SYSTEM\"\n" +
@@ -131,9 +131,9 @@ public void testParsing() throws JsonProcessingException {
                 BackupMethod.BIGQUERY_SNAPSHOT,
                 TimeTravelOffsetDays.DAYS_0,
                 BackupConfigSource.SYSTEM,
-                "project"
+                "storage_project"
         )
-                .setBackupOperationProject("project")
+                .setBackupOperationProject("operation_project")
                 .setBigQuerySnapshotExpirationDays(15.0)
                 .setBigQuerySnapshotStorageDataset("dataset")
                 .setGcsSnapshotStorageLocation("gs://bla/")
[Remaining 2 changed files not shown]
