diff --git a/services/flight-crawler/src/crawl.ts b/services/flight-crawler/src/crawl.ts
index 9078b4756..6252b5848 100644
--- a/services/flight-crawler/src/crawl.ts
+++ b/services/flight-crawler/src/crawl.ts
@@ -10,10 +10,12 @@ import type {FetchOptions} from '@alwatr/fetch';
 export async function crawlAllJobs(): Promise<void> {
   logger.logMethod('crawlAllJobs');
   const jobList = (await storageClient.getStorage()).data;
-  for (const jobId in jobList) {
-    if (!Object.prototype.hasOwnProperty.call(jobList, jobId)) continue;
+  const jobKeyList = Object.keys(jobList);
+  let updated = false;
+
+  for (let i = 0; i < jobKeyList.length; i++) {
     try {
-      const job = jobList[jobId];
+      const job = jobList[jobKeyList[i]];
       const oldResultList = job.resultList;
       const resultList = await crawl(job.detail);
       job.resultList = resultList;
@@ -21,13 +23,16 @@ export async function crawlAllJobs(): Promise<void> {
         const message = makeMessage(job);
         await notify(config.notifier.to, message);
         logger.logOther(`Notified to ${config.notifier.to}!`);
+        await storageClient.set(job);
+        updated = true;
       }
-      await storageClient.set(job);
     } catch (err) {
       logger.error('crawlAllJobs', 'crawling_failed', err);
     }
   }
 
+  // for updating meta
+  if (updated === false) await storageClient.set(jobList[jobKeyList[jobKeyList.length - 1]]);
 }
 
 async function crawl(detail: JobDetail): Promise<Array<JobResult>> {