feat: bulk db insert with batch upload
MichaelTaylor3D committed Jan 17, 2022
1 parent f2c2616 commit 26705bb
Showing 2 changed files with 40 additions and 10 deletions.
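
Both controllers get the same treatment: instead of awaiting a Staging.upsert call for every parsed CSV row, each row is pushed into a recordsToCreate array and written with a single Staging.bulkCreate call once parsing finishes. A minimal sketch of the difference, assuming a Sequelize-style model; the Staging stub and parsedRows array below are hypothetical stand-ins, not code from this commit:

// Stubbed model; the real Staging model is defined elsewhere in the repo.
const Staging = {
  upsert: async (row) => {},      // one database round-trip per call
  bulkCreate: async (rows) => {}, // one round-trip for the whole batch
};
const parsedRows = [{ data: '[{"a":1}]' }, { data: '[{"b":2}]' }];

// Before: one upsert per row, awaited inside the parser callback.
for (const stagedData of parsedRows) {
  await Staging.upsert(stagedData);
}

// After: buffer rows while streaming, then insert them in one statement.
const recordsToCreate = [];
for (const stagedData of parsedRows) {
  recordsToCreate.push(stagedData);
}
await Staging.bulkCreate(recordsToCreate);

One behavioral difference to note: Sequelize's upsert updates a conflicting row in place, while bulkCreate performs plain inserts unless given an updateOnDuplicate option, so this change assumes the staged rows are always new.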
25 changes: 20 additions & 5 deletions src/controllers/project.controller.js
@@ -199,13 +199,15 @@ export const batchUpload = async (req, res) => {
   const buffer = csvFile.data;
   const stream = Readable.from(buffer.toString('utf8'));
 
+  const recordsToCreate = [];
+
   csv()
     .fromStream(stream)
     .subscribe(async (newRecord) => {
       let action = 'UPDATE';
 
       if (newRecord.warehouseProjectId) {
-        // Fail if they supplied their own warehouseUnitId and it doesnt exist
+        // Fail if they supplied their own warehouseProjectId and it doesnt exist
         const possibleExistingRecord = await assertProjectRecordExists(
           newRecord.warehouseProjectId,
         );
@@ -234,7 +236,7 @@ export const batchUpload = async (req, res) => {
         data: JSON.stringify([newRecord]),
       };
 
-      await Staging.upsert(stagedData);
+      recordsToCreate.push(stagedData);
     })
     .on('error', (error) => {
       if (!res.headersSent) {
@@ -244,9 +246,22 @@ export const batchUpload = async (req, res) => {
         });
       }
     })
-    .on('done', () => {
-      if (!res.headersSent) {
-        res.json({ message: 'CSV processing complete' });
-      }
+    .on('done', async () => {
+      if (recordsToCreate.length) {
+        await Staging.bulkCreate(recordsToCreate);
+
+        if (!res.headersSent) {
+          res.json({
+            message:
+              'CSV processing complete, your records have been added to the staging table.',
+          });
+        }
+      } else {
+        if (!res.headersSent) {
+          res
+            .status(400)
+            .json({ message: 'There were no valid records to parse' });
+        }
+      }
     });
 };
25 changes: 20 additions & 5 deletions src/controllers/units.controller.js
@@ -243,6 +245,8 @@ export const batchUpload = async (req, res) => {
   const buffer = csvFile.data;
   const stream = Readable.from(buffer.toString('utf8'));
 
+  const recordsToCreate = [];
+
   csv()
     .fromStream(stream)
     .subscribe(async (newRecord) => {
@@ -251,7 +253,7 @@ export const batchUpload = async (req, res) => {
       if (newRecord.warehouseUnitId) {
         // Fail if they supplied their own warehouseUnitId and it doesnt exist
         const possibleExistingRecord = await assertUnitRecordExists(
-          req.body.warehouseUnitId,
+          newRecord.warehouseUnitId,
         );
 
         assertOrgIsHomeOrg(res, possibleExistingRecord.dataValues.orgUid);
@@ -283,7 +285,7 @@ export const batchUpload = async (req, res) => {
         data: JSON.stringify([newRecord]),
       };
 
-      await Staging.upsert(stagedData);
+      recordsToCreate.push(stagedData);
    })
     .on('error', (error) => {
       if (!res.headersSent) {
@@ -293,9 +295,22 @@ export const batchUpload = async (req, res) => {
        });
      }
    })
-    .on('done', () => {
-      if (!res.headersSent) {
-        res.json({ message: 'CSV processing complete' });
-      }
+    .on('done', async () => {
+      if (recordsToCreate.length) {
+        await Staging.bulkCreate(recordsToCreate);
+
+        if (!res.headersSent) {
+          res.json({
+            message:
+              'CSV processing complete, your records have been added to the staging table.',
+          });
+        }
+      } else {
+        if (!res.headersSent) {
+          res
+            .status(400)
+            .json({ message: 'There were no valid records to parse' });
+        }
+      }
     });
 };
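
Why the bulk insert can safely live in the 'done' handler: csvtojson holds parsing until a Promise returned from the subscribe hook resolves, so 'done' fires only after every row has been pushed. A self-contained sketch of that lifecycle, again with a hypothetical stub in place of the real Staging model:

import csv from 'csvtojson';
import { Readable } from 'stream';

// Hypothetical stand-in for the Sequelize Staging model used above.
const Staging = {
  bulkCreate: async (rows) => console.log(`inserted ${rows.length} rows`),
};

const stream = Readable.from('col1,col2\na,1\nb,2');
const recordsToCreate = [];

csv()
  .fromStream(stream)
  .subscribe(async (newRecord) => {
    // Buffer each parsed row instead of writing it immediately.
    recordsToCreate.push({ data: JSON.stringify([newRecord]) });
  })
  .on('done', async () => {
    // Every subscribe callback has settled by now, so the buffer is complete.
    if (recordsToCreate.length) {
      await Staging.bulkCreate(recordsToCreate); // logs "inserted 2 rows"
    }
  });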
