Skip to content

Commit

Permalink
Merge pull request #7232 from SalesforceFoundation/feature/250__fields-not-updated-ge-batch-process
Browse files Browse the repository at this point in the history

W-15110724 - 250__fields-not-updated-ge-batch-process
  • Loading branch information
daniel-fuller authored May 14, 2024
2 parents 829ccd8 + 594f62c commit d6c12e3
Show file tree
Hide file tree
Showing 5 changed files with 63 additions and 4 deletions.
25 changes: 24 additions & 1 deletion force-app/main/default/classes/BDI_DataImportService.cls
Original file line number Diff line number Diff line change
Expand Up @@ -94,9 +94,13 @@ global with sharing class BDI_DataImportService {
'Status__c != \'' + BDI_DataImport_API.bdiImported + '\'',
'Id =: dataImportIds'
};
List<String> selectClause = new List<String>{
String.join(listStrDataImportFields, ','),
DATAIMPORT_BATCH_NUMBER_FIELD
};

return new UTIL_Query()
.withSelectFields(listStrDataImportFields)
.withSelectFields(selectClause)
.withFrom(DataImport__c.SObjectType)
.withWhere(whereClauses)
// this ensures consistency for our test code, but also should
Expand Down Expand Up @@ -619,6 +623,25 @@ global with sharing class BDI_DataImportService {

this.listDI = checkRDFields(listDI);

if (apexJobId != null && listDI.size() > 0) {
List<DataImportBatch__c> listBatch = [SELECT Name, Batch_Number__c, Batch_Status__c, Batch_Defaults__c,
Form_Template__c, RequireTotalMatch__c, Expected_Count_of_Gifts__c,
Expected_Total_Batch_Amount__c, Batch_Table_Columns__c, LastModifiedDate
FROM DataImportBatch__c WHERE Id= :listDI[0].NPSP_Data_Import_Batch__c LIMIT 1];
if (listBatch.size() > 0 ) {
GiftBatch giftBatch = new GiftBatch(listBatch[0]);
Boolean firstInstallmentPaid = giftBatch.shouldPayFirstInstallment();

for (DataImport__c dataImport : listDI) {
if(dataImport.Recurring_Donation_Recurring_Type__c != null) {
dataImport.Donation_Date__c = null;
if (!firstInstallmentPaid) {
dataImport.Donation_Amount__c = null;
}
}
}
}
}
// do any performance optimizations to avoid unnecessary code
disableAllOppRollups();

Expand Down
22 changes: 22 additions & 0 deletions force-app/main/default/classes/BDI_DataImport_API.cls
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,28 @@ global with sharing class BDI_DataImport_API {
return apexJobId;
}

/*******************************************************************************************************
* @description Submits the given Data Import records for processing in a batch apex job scoped to
* the provided batch.
* @param diSettings The Data Import settings to use; when null, the org's configured settings are used.
* @param dataImportIds The Ids of the DataImport__c records to process.
* @param isDryRun Whether to process in dry-run (validation only) mode.
* @param batchId The Id of the DataImportBatch__c the records belong to.
* @return The Id of the submitted batch apex job, or null when no record Ids were provided.
*/
global static Id processDataImportRecords(Data_Import_Settings__c diSettings,
        List<Id> dataImportIds,
        Boolean isDryRun, Id batchId) {
    Id apexJobId;
    if (dataImportIds != null && !dataImportIds.isEmpty()) {
        // Use configured data import settings if none provided.
        if (diSettings == null) {
            diSettings = UTIL_CustomSettingsFacade.getDataImportSettings();
        }
        Savepoint sp = Database.setSavepoint();
        try {
            BDI_DataImport_BATCH batch = new BDI_DataImport_BATCH(
                batchId,
                dataImportIds,
                new BDI_DataImportService(isDryRun, BDI_DataImportService.getDefaultMappingService()));
            apexJobId = Database.executeBatch(batch, Integer.valueOf(diSettings.Batch_Size__c));
        } catch (Exception ex) {
            // Undo any partial work before surfacing the error with added context for callers.
            Database.rollback(sp);
            ex.setMessage(System.Label.bdiAPISelectedError + ' ' + ex.getMessage());
            throw ex;
        }
    }
    return apexJobId;
}

/*******************************************************************************************************
* @description The return result object for each batch that is provided to processDataImportBatches()
*/
Expand Down
9 changes: 8 additions & 1 deletion force-app/main/default/classes/GiftBatchForQueueable.cls
Original file line number Diff line number Diff line change
Expand Up @@ -149,10 +149,17 @@ public inherited sharing class GiftBatchForQueueable {

/**
* @description Submits the current chunk of gifts for BDI processing (not a dry run),
* then removes that chunk from the queue of pending chunks.
*/
public void processChunk() {
    // Derive the record Ids for this chunk's gifts via a keyed map of the data imports.
    Map<Id, DataImport__c> dataImportsById = new Map<Id, DataImport__c>(gifts.asDataImports());
    List<Id> dataImportIds = new List<Id>(dataImportsById.keySet());
    BDI_DataImport_API.processDataImportRecords(dataImportSettings, dataImportIds, false);
    chunkedIds.remove(0);
}

/**
* @description Submits the current chunk of gifts for BDI processing (not a dry run)
* under the given batch, then removes that chunk from the queue of pending chunks.
* @param batchId The Id of the DataImportBatch__c the gifts belong to.
*/
public void processChunk(Id batchId) {
    // Derive the record Ids for this chunk's gifts via a keyed map of the data imports.
    Map<Id, DataImport__c> dataImportsById = new Map<Id, DataImport__c>(gifts.asDataImports());
    List<Id> dataImportIds = new List<Id>(dataImportsById.keySet());
    BDI_DataImport_API.processDataImportRecords(dataImportSettings, dataImportIds, false, batchId);
    chunkedIds.remove(0);
}
public void chunkGiftsThatCanBeProcessed() {
List<SObject> results = giftsSelector.getGiftsReadyToMoveToProcessing(giftBatchId);
if (results.size() > 0) {
Expand Down
4 changes: 3 additions & 1 deletion force-app/main/default/classes/GiftBatchService_TEST.cls
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,10 @@ private class GiftBatchService_TEST {
Test.stopTest();

// Assert
Integer jobsCount = [SELECT count() FROM AsyncApexJob];
Integer jobsCount = [SELECT count() FROM AsyncApexJob WHERE JobType = 'Queueable'];
System.assertEquals(1, jobsCount, 'Should have enqueued a job');
Integer batchjobsCount = [SELECT count() FROM AsyncApexJob WHERE JobType = 'BatchApex'];
System.assertEquals(1, batchjobsCount, 'Should have one batch apex job');

Integer opportunitiesCount = [SELECT count() FROM Opportunity];
System.assertEquals(10, opportunitiesCount, 'Should have created 10 opportunities');
Expand Down
7 changes: 6 additions & 1 deletion force-app/main/default/classes/GiftEntryProcessorQueue.cls
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ public with sharing class GiftEntryProcessorQueue implements Queueable, Database
private final String ABORTED = 'ABORTED';
private GiftBatchForQueueable queueableGiftBatch;
private AsyncApexJobId queueableId;
private GiftBatchId giftBatchId;

@TestVisible
private GiftBatchService giftBatchService {
Expand All @@ -52,6 +53,7 @@ public with sharing class GiftEntryProcessorQueue implements Queueable, Database

/**
* @description Constructs the processor queueable for the given gift batch,
* capturing the batch's Id for use during execution.
* @param giftBatchForProcessing The gift batch whose chunks will be processed.
*/
public GiftEntryProcessorQueue(GiftBatchForQueueable giftBatchForProcessing) {
    this.giftBatchId = giftBatchForProcessing.id();
    this.queueableGiftBatch = giftBatchForProcessing;
}

public void execute(QueueableContext queueableContext) {
Expand All @@ -64,7 +66,10 @@ public with sharing class GiftEntryProcessorQueue implements Queueable, Database
queueableGiftBatch.captureElevateBatches();
queueableGiftBatch.updateGiftsInChunk();
queueableGiftBatch.preprocessRecurringGifts();
queueableGiftBatch.processChunk();
queueableGiftBatch.processChunk(giftBatchId.value());
} else {
BDI_DataImport_BATCH batch = new BDI_DataImport_BATCH(giftBatchId.value(), false);
String jobId = Database.executeBatch(batch, Integer.valueOf(batch.diSettings.Batch_Size__c));
}

if (queueableGiftBatch.hasChunksToProcess()) {
Expand Down

0 comments on commit d6c12e3

Please sign in to comment.