Skip to content

Commit

Permalink
fix: change earnings/batch GET to use streams
Browse files Browse the repository at this point in the history
  • Loading branch information
Kpoke committed Oct 27, 2021
1 parent 6563b9b commit 0980970
Show file tree
Hide file tree
Showing 4 changed files with 148 additions and 105 deletions.
55 changes: 30 additions & 25 deletions api-tests/earnings-api.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -504,29 +504,34 @@ describe('Earnings API tests.', () => {
});
});

// describe('Earnings BATCH GET', () => {
// const binaryParser = (res, callback) => {
// res.setEncoding('binary');
// res.data = '';
// res.on('data', function (chunk) {
// res.data += chunk;
// });
// res.on('end', function () {
// callback(null, Buffer.from(res.data, 'binary'));
// });
// };
// it(`Should get earnings successfully`, function (done) {
// request(server)
// .get(`/earnings/batch`)
// .expect('Content-Type', 'text/csv; charset=utf-8')
// .buffer()
// .parse(binaryParser)
// .expect(200)
// .end(function (err, res) {
// if (err) return done(err);
// expect(res.body instanceof Buffer).to.be.true;
// return done();
// });
// });
// });
// Verifies that GET /earnings/batch streams a CSV attachment whose first
// row is the expected header line.
describe('Earnings BATCH GET', () => {
  // Header row the handler's CSV stream is expected to emit first.
  const expectedCsvHeaders = 'earnings_id,worker_id,phone,currency,amount,status';

  /**
   * Superagent response parser: buffers the binary body and, once the
   * stream ends, asserts the payload begins with the expected CSV header
   * row before handing the Buffer back to superagent.
   *
   * @param {object} res - superagent/Node response stream.
   * @param {Function} callback - called with (err, Buffer) when done.
   */
  const binaryParser = (res, callback) => {
    res.setEncoding('binary');
    res.data = '';
    res.on('data', (chunk) => {
      res.data += chunk;
    });
    res.on('end', () => {
      // Check the accumulated body rather than a single chunk: stream
      // chunk boundaries are arbitrary, so the header row may arrive
      // split across chunks or bundled with data rows and a newline —
      // a strict chunk === headers comparison can never match then.
      expect(res.data.startsWith(expectedCsvHeaders)).to.be.true;
      callback(null, Buffer.from(res.data, 'binary'));
    });
  };

  it(`Should get earnings successfully`, function (done) {
    request(server)
      .get(`/earnings/batch`)
      .expect('Content-Type', 'text/csv; charset=utf-8')
      .buffer()
      .parse(binaryParser)
      .expect(200)
      .end(function (err, res) {
        if (err) return done(err);
        expect(res.body instanceof Buffer).to.be.true;
        return done();
      });
  });
});
});
127 changes: 96 additions & 31 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
"server-test": "DEBUG=express:* NODE_LOG_LEVEL=debug nodemon server/serverTest.js",
"server": "nodemon server/server.js",
"test-seedDB": "NODE_ENV=test mocha -r dotenv/config dotenv_config_path=.env.test --timeout 10000 --require co-mocha './**/*.spec.js'",
"test-integration-ci": "NODE_ENV=test mocha -r dotenv/config --dotenv_config_path=.env.test --exit --timeout 30000 --require co-mocha './api-tests'",
"test-integration-ci": "mocha -r dotenv/config --dotenv_config_path=.env.test --exit --timeout 30000 --require co-mocha './api-tests'",
"test-watch": "NODE_ENV=test NODE_LOG_LEVEL=info mocha -r dotenv/config dotenv_config_path=.env.test --timeout 10000 --require co-mocha -w -b --ignore './server/repositories/**/*.spec.js' './server/setup.js' './server/**/*.spec.js' './__tests__/seed.spec.js' './__tests__/supertest.js'",
"test-watch-debug": "NODE_ENV=test NODE_LOG_LEVEL=debug mocha -r dotenv/config dotenv_config_path=.env.test --timeout 10000 --require co-mocha -w -b --ignore './server/repositories/**/*.spec.js' './server/setup.js' './server/**/*.spec.js' './__tests__/seed.spec.js' './__tests__/supertest.js'",
"prettier-fix": "prettier ./ --write",
Expand All @@ -31,6 +31,7 @@
"author": "Greenstand Engineers",
"license": "GPL-3.0-or-later",
"dependencies": {
"@fast-csv/format": "^4.3.5",
"@sentry/node": "^5.1.0",
"aws-sdk": "^2.1004.0",
"body-parser": "^1.18.2",
Expand All @@ -41,7 +42,6 @@
"express-async-handler": "^1.1.4",
"express-validator": "^6.4.0",
"joi": "^17.4.2",
"json2csv": "^5.0.6",
"knex": "^0.21.5",
"loglevel": "^1.6.8",
"multer": "^1.4.3",
Expand Down
67 changes: 20 additions & 47 deletions server/handlers/earningsHandler.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
const Joi = require('joi');
const { Parser, AsyncParser } = require('json2csv');
const csv = require('csvtojson');
const fs = require('fs');
const { v4: uuid } = require('uuid');
const { Readable, Transform } = require('stream');
const { format } = require('@fast-csv/format');

const { BatchEarning } = require('../models/Earnings');
const { uploadCsv } = require('../services/aws');
Expand Down Expand Up @@ -78,52 +77,26 @@ const earningsBatchGet = async (req, res, next) => {
const session = new Session();
const earningsRepo = new EarningsRepository(session);

const executeGetBatchEarnings = getBatchEarnings(earningsRepo);
const { earningsStream } = await executeGetBatchEarnings(req.query);
// const input = new Readable({ objectMode: true });
// input._read = () => {};
// earningsStream
// .on('data', (row) => {
// console.log(row);
// input.push(BatchEarning({ ...row }));
// })
// .on('error', (error) => {
// console.log('error', error.message);
// throw new HttpError(500, error.message);
// })
// .on('end', () => input.push(null));
const earningTransform = new Transform({
objectMode: true,
transform(chunk, encoding, callback) {
console.log(BatchEarning(chunk));
this.push(BatchEarning(chunk).toString());
callback();
},
});
// const transformedReadableStream = new Readable({
// objectMode: true,
// read(size) {
// console.log(size);
// this.push(size);
// },
// });

const asyncParser = new AsyncParser({}, { objectMode: true });
asyncParser.throughTransform(earningTransform);
const parsingProcessor = asyncParser.fromInput(earningsStream);

try {
const csv = await parsingProcessor.promise();
// parsingProcessor.throughTransform(earningTransform);
parsingProcessor
.on('data', (chunk) => console.log(chunk))
.on('end', () => console.log(csv))
.on('error', (err) => console.error(err));
console.log(csv);
// res.header('Content-Type', 'text/csv; charset=utf-8');
// res.attachment('batchEarnings.csv');
// res.send(csv);
// res.end();
const executeGetBatchEarnings = getBatchEarnings(earningsRepo);
const { earningsStream } = await executeGetBatchEarnings(req.query);
const csvStream = format({ headers: true });

earningsStream
.on('data', async (row) => {
csvStream.write(BatchEarning({ ...row }));
})
.on('error', (error) => {
console.log('error', error.message);
throw new HttpError(422, error.message);
})
.on('end', () => csvStream.end());

res.writeHead(200, {
'Content-Type': 'text/csv; charset=utf-8',
'Content-Disposition': 'attachment; filename=batchEarnings.csv',
});
csvStream.pipe(res).on('end', () => {});
} catch (err) {
console.error(err);
throw new HttpError(422, err.message);
Expand Down

0 comments on commit 0980970

Please sign in to comment.