Skip to content

Commit

Permalink
fix(uniquely/flight-crawler): prevent repeated storage writes (#536)
Browse files Browse the repository at this point in the history
  • Loading branch information
alimd authored Dec 22, 2022
2 parents 3f02c06 + 0a89675 commit 0299b57
Showing 1 changed file with 9 additions and 4 deletions.
13 changes: 9 additions & 4 deletions services/flight-crawler/src/crawl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,24 +10,29 @@ import type {FetchOptions} from '@alwatr/fetch';
export async function crawlAllJobs(): Promise<void> {
logger.logMethod('crawlAllJobs');
const jobList = (await storageClient.getStorage()).data;
for (const jobId in jobList) {
if (!Object.prototype.hasOwnProperty.call(jobList, jobId)) continue;
const jobKeyList = Object.keys(jobList);
let updated = false;

for (let i = 0; i < jobKeyList.length; i++) {
try {
const job = jobList[jobId];
const job = jobList[jobKeyList[i]];
const oldResultList = job.resultList;
const resultList = await crawl(job.detail);
job.resultList = resultList;
if (differentObject(job.resultList, oldResultList)) {
const message = makeMessage(job);
await notify(config.notifier.to, message);
logger.logOther(`Notified to ${config.notifier.to}!`);
await storageClient.set(job);
updated = true;
}
await storageClient.set(job);
}
catch (err) {
logger.error('crawlAllJobs', 'crawling_failed', err);
}
}
// for updating meta
if (updated === false) await storageClient.set(jobList[jobKeyList[jobKeyList.length - 1]]);
}

async function crawl(detail: JobDetail): Promise<Array<JobResult>> {
Expand Down

0 comments on commit 0299b57

Please sign in to comment.