diff --git a/src/services/identity/ReposService.ts b/src/services/identity/ReposService.ts
index 8fee198cc..2c09de224 100644
--- a/src/services/identity/ReposService.ts
+++ b/src/services/identity/ReposService.ts
@@ -1,4 +1,3 @@
-import { exec } from "child_process"
 import fs from "fs"
 import path from "path"
 
@@ -22,6 +21,7 @@ import {
 } from "@root/constants"
 import GitHubApiError from "@root/errors/GitHubApiError"
 import logger from "@root/logger/logger"
+import { doesDirectoryExist } from "@root/utils/fs-utils"
 
 const SYSTEM_GITHUB_TOKEN = config.get("github.systemToken")
 const octokit = new Octokit({ auth: SYSTEM_GITHUB_TOKEN })
@@ -120,7 +120,7 @@ export default class ReposService {
     const dir = this.getLocalStagingRepoPath(repoName)
 
     // 1. Set URLs in local _config.yml
-    this.setUrlsInLocalConfig(dir, repoName, stagingUrl, productionUrl)
+    await this.setUrlsInLocalConfig(dir, repoName, stagingUrl, productionUrl)
 
     // 2. Commit changes in local repo
     await this.simpleGit
@@ -151,14 +151,21 @@ export default class ReposService {
     await this.simpleGit.cwd({ path: dir, root: false }).checkout("staging")
   }
 
-  private setUrlsInLocalConfig(
+  private async setUrlsInLocalConfig(
     dir: string,
     repoName: string,
     stagingUrl: string,
     productionUrl: string
   ) {
     const configPath = `${dir}/_config.yml`
-    const configFile = fs.readFileSync(configPath, "utf-8")
+    let configFile: string
+    try {
+      configFile = await fs.promises.readFile(configPath, "utf-8")
+    } catch (error) {
+      throw new UnprocessableError(
+        `Error reading _config.yml for '${repoName}': ${error}`
+      )
+    }
     const lines = configFile.split("\n")
     const stagingIdx = lines.findIndex((line) => line.startsWith("staging:"))
     const prodIdx = lines.findIndex((line) => line.startsWith("prod:"))
@@ -169,7 +176,7 @@ export default class ReposService {
     }
     lines[stagingIdx] = `staging: ${stagingUrl}`
     lines[prodIdx] = `prod: ${productionUrl}`
-    fs.writeFileSync(configPath, lines.join("\n"))
+    await fs.promises.writeFile(configPath, lines.join("\n"))
   }
 
   createRepoOnGithub = (
@@ -227,16 +234,16 @@ export default class ReposService {
     const stgLiteDir = this.getLocalStagingLiteRepoPath(repoName)
 
     // Make sure the local path is empty, just in case dir was used on a previous attempt.
-    fs.rmSync(`${stgDir}`, { recursive: true, force: true })
+    await fs.promises.rm(`${stgDir}`, { recursive: true, force: true })
 
     // Clone base repo locally
-    fs.mkdirSync(stgDir)
+    await fs.promises.mkdir(stgDir)
     await this.simpleGit
       .cwd({ path: stgDir, root: false })
       .clone(SITE_CREATION_BASE_REPO_URL, stgDir, ["-b", "staging"])
 
     // Clear git
-    fs.rmSync(`${stgDir}/.git`, { recursive: true, force: true })
+    await fs.promises.rm(`${stgDir}/.git`, { recursive: true, force: true })
 
     // Prepare git repo
     await this.simpleGit
@@ -376,9 +383,9 @@ export const createRecords = (zoneId: string): Record[] => {
   }
 
   async setUpStagingLite(stgLiteDir: string, repoUrl: string) {
-    fs.rmSync(`${stgLiteDir}`, { recursive: true, force: true })
+    await fs.promises.rm(`${stgLiteDir}`, { recursive: true, force: true })
     // create an empty folder stgLiteDir
-    fs.mkdirSync(stgLiteDir)
+    await fs.promises.mkdir(stgLiteDir)
 
     // note: for some reason, combining below commands led to race conditions
     // so we have to do it separately
@@ -392,20 +399,36 @@ export const createRecords = (zoneId: string): Record[] => {
       .cwd({ path: stgLiteDir, root: false })
       .checkout("staging")
 
-    if (fs.existsSync(path.join(`${stgLiteDir}`, `images`))) {
+    const doesImagesFolderExistResult = await doesDirectoryExist(
+      path.join(`${stgLiteDir}`, `images`)
+    )
+
+    if (doesImagesFolderExistResult.isErr()) {
+      throw doesImagesFolderExistResult.error
+    }
+
+    if (doesImagesFolderExistResult.value) {
       await this.simpleGit
         .cwd({ path: stgLiteDir, root: false })
         .rm(["-r", "images"])
     }
 
-    if (fs.existsSync(path.join(`${stgLiteDir}`, `files`))) {
+    const doesFilesFolderExistResult = await doesDirectoryExist(
+      path.join(`${stgLiteDir}`, `files`)
+    )
+
+    if (doesFilesFolderExistResult.isErr()) {
+      throw doesFilesFolderExistResult.error
+    }
+
+    if (doesFilesFolderExistResult.value) {
       await this.simpleGit
        .cwd({ path: stgLiteDir, root: false })
        .rm(["-r", "files"])
    }
 
     // Clear git
-    fs.rmSync(`${stgLiteDir}/.git`, { recursive: true, force: true })
+    await fs.promises.rm(`${stgLiteDir}/.git`, { recursive: true, force: true })
 
     // Prepare git repo
     await this.simpleGit
diff --git a/src/services/review/RepoCheckerService.ts b/src/services/review/RepoCheckerService.ts
index d478c08a2..9fd5072fc 100644
--- a/src/services/review/RepoCheckerService.ts
+++ b/src/services/review/RepoCheckerService.ts
@@ -27,6 +27,7 @@ import {
   BrokenLinkErrorDto,
 } from "@root/types/siteChecker"
 import { extractPathInfo } from "@root/utils/files"
+import { doesDirectoryExist } from "@root/utils/fs-utils"
 
 import GitFileSystemService from "../db/GitFileSystemService"
 import { PageService } from "../fileServices/MdPageServices/PageService"
@@ -77,33 +78,63 @@ export default class RepoCheckerService {
     this.pageService = pageService
   }
 
-  isCurrentlyLocked(repo: string): ResultAsync<boolean, SiteCheckerError> {
+  createLogsFolder(repo: string): ResultAsync<boolean, SiteCheckerError> {
     const logsFilePath = path.join(SITE_CHECKER_REPO_PATH, repo)
-    // create logs folder if it does not exist
-    if (!fs.existsSync(logsFilePath)) {
-      fs.mkdirSync(logsFilePath, { recursive: true })
-    }
+    // create logs folder if it does not exist
+    return doesDirectoryExist(logsFilePath)
+      .mapErr((err) => {
+        logger.error(
+          `SiteCheckerError: error finding if logs directory exists for ${repo} :${err}`
+        )
+        return new SiteCheckerError(
+          `Error finding if logs directory exists for ${repo} :${err}`
+        )
+      })
+      .andThen((doesLogsFolderExist) => {
+        if (!doesLogsFolderExist) {
+          return ResultAsync.fromPromise(
+            fs.promises.mkdir(logsFilePath, { recursive: true }),
+            (err) => {
+              logger.error(
+                `SiteCheckerError: Unable to create logs dir: ${err}`
+              )
+              return new SiteCheckerError(
+                `SiteCheckerError: Unable to create logs dir: ${err}`
+              )
+            }
+          ).map(() => true)
+        }
+        return okAsync(true)
+      })
+  }
+  isCurrentlyLocked(repo: string): ResultAsync<boolean, SiteCheckerError> {
+    const logsFilePath = path.join(SITE_CHECKER_REPO_PATH, repo)
 
     // check if checker.lock exists
     const lockFilePath = path.join(logsFilePath, LOCK_CONTENT)
-
-    return ResultAsync.fromPromise(
-      fs.promises.access(lockFilePath, fs.constants.F_OK),
-      (err) => new SiteCheckerError(`${err}`)
-    ).map(() => true)
+    return this.createLogsFolder(repo)
+      .andThen(() =>
+        ResultAsync.fromPromise(
+          fs.promises.access(lockFilePath, fs.constants.F_OK),
+          (err) =>
+            new SiteCheckerError(
+              `Unable to determine if the lock folder exists for ${repo}: ${err}`
+            )
+        )
+      )
+      .map(() => true)
   }
 
   createLock(repo: string) {
     const folderPath = path.join(SITE_CHECKER_REPO_PATH, repo)
-    if (!fs.existsSync(folderPath)) {
-      fs.mkdirSync(folderPath, { recursive: true })
-    }
 
     // create a checker.lock file
     const lockFilePath = path.join(folderPath, LOCK_CONTENT)
-    return ResultAsync.fromPromise(
-      fs.promises.writeFile(lockFilePath, "locked"),
-      (error) => new SiteCheckerError(`${error}`)
+    return this.createLogsFolder(repo).andThen(() =>
+      ResultAsync.fromPromise(
+        fs.promises.writeFile(lockFilePath, "locked"),
+        (error) => new SiteCheckerError(`${error}`)
+      )
     )
   }
 
@@ -375,92 +406,104 @@ export default class RepoCheckerService {
       fileName,
       viewablePageInCms,
     ] of mapOfMdFilesAndViewableLinkInCms) {
-      const file = fs.readFileSync(path.join(repoPath, fileName), "utf8")
-
-      const filePermalink = file
-        .split("---")
-        ?.at(1)
-        ?.split("permalink: ")
-        ?.at(1)
-        ?.split("\n")
-        ?.at(0)
-
-      if (!filePermalink) {
-        continue
-      }
-      const normalisedPermalink = this.normalisePermalink(filePermalink)
+      const siteError = ResultAsync.fromPromise(
+        fs.promises.readFile(path.join(repoPath, fileName), "utf8"),
+        (error) =>
+          new SiteCheckerError(
+            `SiteCheckerError: error reading file ${fileName}: ${error}`
+          )
+      )
+        .andThen((file) => {
+          const filePermalink = file
+            .split("---")
+            ?.at(1)
+            ?.split("permalink: ")
+            ?.at(1)
+            ?.split("\n")
+            ?.at(0)
+
+          if (!filePermalink) {
+            return errAsync("No permalink found in front matter")
+          }
+          const normalisedPermalink = this.normalisePermalink(filePermalink)
 
-      // we are only parsing out the front matter
-      const fileContent = file.split("---")?.slice(2).join("---")
-      if (!fileContent) {
-        continue
-      }
-      findErrors.push(
-        ResultAsync.fromPromise(
-          Promise.resolve(marked.parse(fileContent)),
-          (err) => new SiteCheckerError(`${err}`)
-        ).andThen((html) => {
-          const dom = new JSDOM(html)
-          const anchorTags = dom.window.document.querySelectorAll("a")
-          forEach(anchorTags, (tag) => {
-            const href = tag.getAttribute("href")
-            const text = tag.textContent
-
-            if (!text) {
-              // while rendered in the dom, unlikely to be
-              // noticed by end citizen.
-              return
-            }
-            if (!tag.hasAttribute("href")) {
-              const error: RepoError = {
-                type: RepoErrorTypes.BROKEN_LINK,
-                linkToAsset: "",
-                viewablePageInCms,
-                viewablePageInStaging: path.join(normalisedPermalink),
-                linkedText: text,
+          // we are only parsing out the front matter
+          const fileContent = file.split("---")?.slice(2).join("---")
+          if (!fileContent) {
+            return errAsync("No file content found")
+          }
+          return okAsync({ fileContent, normalisedPermalink })
+        })
+        .andThen(({ fileContent, normalisedPermalink }) =>
+          ResultAsync.fromPromise(
+            Promise.resolve(marked.parse(fileContent)),
+            (err) => new SiteCheckerError(`${err}`)
+          ).andThen((html) => {
+            const dom = new JSDOM(html)
+            const anchorTags = dom.window.document.querySelectorAll("a")
+            forEach(anchorTags, (tag) => {
+              const href = tag.getAttribute("href")
+              const text = tag.textContent
+
+              if (!text) {
+                // while rendered in the dom, unlikely to be
+                // noticed by end citizen.
+                return
              }
-              errors.push(error)
-            }
-            if (!href) {
-              // could be intentional linking to self
-              // eg. Back to top
-              return
-            }
-
-            if (!this.hasValidReference(setOfAllMediaAndPagesPath, href)) {
-              const error: RepoError = {
-                type: RepoErrorTypes.BROKEN_LINK,
-                linkToAsset: href,
-                viewablePageInCms,
-                viewablePageInStaging: path.join(normalisedPermalink),
-                linkedText: text,
+              if (!tag.hasAttribute("href")) {
+                const error: RepoError = {
+                  type: RepoErrorTypes.BROKEN_LINK,
+                  linkToAsset: "",
+                  viewablePageInCms,
+                  viewablePageInStaging: path.join(normalisedPermalink),
+                  linkedText: text,
+                }
+                errors.push(error)
              }
-              errors.push(error)
-            }
-          })
-          const imgTags = dom.window.document.querySelectorAll("img")
-          forEach(imgTags, (tag) => {
-            const src = tag.getAttribute("src")
-            if (!src) {
-              // while rendered in the dom, unlikely to be
-              // noticed by end citizen.
-              return
-            }
-
-            //! todo: include check for post bundled files https://github.com/isomerpages/isomerpages-template/tree/staging/assets/img
-            if (!this.hasValidReference(setOfAllMediaAndPagesPath, src)) {
-              const error: RepoError = {
-                type: RepoErrorTypes.BROKEN_IMAGE,
-                linkToAsset: src,
-                viewablePageInCms,
-                viewablePageInStaging: path.join(normalisedPermalink),
+              if (!href) {
+                // could be intentional linking to self
+                // eg. Back to top
+                return
              }
-              errors.push(error)
-            }
+
+              if (
+                !this.hasValidReference(setOfAllMediaAndPagesPath, href)
+              ) {
+                const error: RepoError = {
+                  type: RepoErrorTypes.BROKEN_LINK,
+                  linkToAsset: href,
+                  viewablePageInCms,
+                  viewablePageInStaging: path.join(normalisedPermalink),
+                  linkedText: text,
+                }
+                errors.push(error)
+              }
+            })
+            const imgTags = dom.window.document.querySelectorAll("img")
+            forEach(imgTags, (tag) => {
+              const src = tag.getAttribute("src")
+              if (!src) {
+                // while rendered in the dom, unlikely to be
+                // noticed by end citizen.
+                return
+              }
+
+              //! todo: include check for post bundled files https://github.com/isomerpages/isomerpages-template/tree/staging/assets/img
+              if (!this.hasValidReference(setOfAllMediaAndPagesPath, src)) {
+                const error: RepoError = {
+                  type: RepoErrorTypes.BROKEN_IMAGE,
+                  linkToAsset: src,
+                  viewablePageInCms,
+                  viewablePageInStaging: path.join(normalisedPermalink),
+                }
+                errors.push(error)
+              }
+            })
+            return ok(undefined)
          })
-          return ok(undefined)
-        })
-      )
+        )
+
+      findErrors.push(siteError)
     }
     logger.info(`Site checker for repo ${repo} completed successfully!`)
     return ResultAsync.combineWithAllErrors(findErrors)
@@ -493,52 +536,60 @@ export default class RepoCheckerService {
     return false
   }
 
-  reporter(repo: string, errors: RepoError[]): ResultAsync<RepoError[], SiteCheckerError> {
-    const folderPath = path.join(SITE_CHECKER_REPO_PATH, repo)
-    if (!fs.existsSync(folderPath)) {
-      fs.mkdirSync(folderPath, { recursive: true })
-    }
-    const filePath = path.join(SITE_CHECKER_REPO_PATH, repo, REPO_LOG)
-    const stringifiedErrors = this.convertToCSV(errors)
-    if (!fs.existsSync(filePath)) {
-      fs.appendFileSync(filePath, stringifiedErrors)
-    } else {
-      fs.writeFileSync(filePath, stringifiedErrors)
-    }
+  reporter(
+    repo: string,
+    errors: RepoError[]
+  ): ResultAsync<RepoError[], SiteCheckerError> {
+    return this.createLogsFolder(repo).andThen(() => {
+      const filePath = path.join(SITE_CHECKER_REPO_PATH, repo, REPO_LOG)
+      const stringifiedErrors = this.convertToCSV(errors)
 
-    const isProd = config.get("env") === "prod"
-    if (!isProd) {
-      return okAsync(errors)
-    }
+      return ResultAsync.fromPromise(
+        fs.promises.writeFile(filePath, stringifiedErrors),
+        (writeFileError) => {
+          logger.error(
+            `SiteCheckerError: Error creating log file for ${repo} :${writeFileError}`
+          )
+          return new SiteCheckerError(
+            `Error creating log file for ${repo} :${writeFileError}`
+          )
+        }
+      ).andThen(() => {
+        const isProd = config.get("env") === "prod"
+        if (!isProd) {
+          return okAsync(errors)
+        }
 
-    const gitOperations:
-      | Response<CommitResult>
-      | Response<PushResult> = this.git
-      .cwd({ path: SITE_CHECKER_REPO_PATH, root: false })
-      .checkout("main")
-      .fetch()
-      .merge(["origin/main"])
-      .add(["."])
-      .commit(`Site checker logs added for ${repo}`)
-      .push()
-
-    // pushing since there is no real time requirement for user
-    return ResultAsync.fromPromise(
-      gitOperations,
-      (error) => new SiteCheckerError(`${error}`)
-    )
-      .map(() => errors)
-      .orElse(() => {
-        /**
-         * Actually committing the repo to remote is not a critical operation, and done for observability.
-         * If it fails, we can still return the broken links report to the user.
-         * We also do not want to log the actual error as it could cause other alarms to go off.
-         */
-        logger.info(
-          `SiteCheckerInfo: Error pushing logs for ${repo} to remote isomer-site-checker repo`
-        )
-        return ok(errors)
+        const gitOperations:
+          | Response<CommitResult>
+          | Response<PushResult> = this.git
+          .cwd({ path: SITE_CHECKER_REPO_PATH, root: false })
+          .checkout("main")
+          .fetch()
+          .merge(["origin/main"])
+          .add(["."])
+          .commit(`Site checker logs added for ${repo}`)
+          .push()
+
+        // pushing since there is no real time requirement for user
+        return ResultAsync.fromPromise<
+          CommitResult | PushResult,
+          SiteCheckerError
+        >(gitOperations, (error) => new SiteCheckerError(`${error}`))
+          .map(() => errors)
+          .orElse(() => {
+            /**
+             * Actually committing the repo to remote is not a critical operation, and done for observability.
+             * If it fails, we can still return the broken links report to the user.
+             * We also do not want to log the actual error as it could cause other alarms to go off.
+ */ + logger.info( + `SiteCheckerInfo: Error pushing logs for ${repo} to remote isomer-site-checker repo` + ) + return ok(errors) + }) }) + }) } // adapted from https://stackoverflow.com/questions/56427009/how-to-return-papa-parsed-csv-via-promise-async-await @@ -566,37 +617,40 @@ export default class RepoCheckerService { repo: string ): ResultAsync => { const filePath = path.join(SITE_CHECKER_REPO_PATH, repo, REPO_LOG) - if (!fs.existsSync(filePath)) { - logger.error(`SiteCheckerError: Folder does not exist for ${repo}`) - return errAsync(new SiteCheckerError(`Folder does not exist for ${repo}`)) - } - return this.isCurrentlyErrored(repo).andThen((isError) => { - if (isError) { - return errAsync(new SiteCheckerError(`Error checking repo ${repo}`)) - } + return ResultAsync.fromPromise(fs.promises.stat(filePath), () => { + logger.error( + `SiteCheckerError: Error reading log file for ${repo}: file does not exist` + ) + return new SiteCheckerError(`Folder does not exist for ${repo}`) + }).andThen(() => + this.isCurrentlyErrored(repo).andThen((isError) => { + if (isError) { + return errAsync(new SiteCheckerError(`Error checking repo ${repo}`)) + } - return this.isCurrentlyLocked(repo) - .andThen(() => okAsync({ status: "loading" })) - .orElse(() => - ResultAsync.fromPromise( - this.generateRepoErrorsFromCsv(repo), - (error) => { - logger.error( - `SiteCheckerError: Error reading csv for repo ${repo}: ${error}` - ) - return new SiteCheckerError( - `Error reading csv for repo ${repo}: ${error}` - ) - } - ).andThen((errors) => - okAsync({ - errors, - status: "success", - }) + return this.isCurrentlyLocked(repo) + .andThen(() => okAsync({ status: "loading" })) + .orElse(() => + ResultAsync.fromPromise( + this.generateRepoErrorsFromCsv(repo), + (error) => { + logger.error( + `SiteCheckerError: Error reading csv for repo ${repo}: ${error}` + ) + return new SiteCheckerError( + `Error reading csv for repo ${repo}: ${error}` + ) + } + ).andThen((errors) => + okAsync({ + errors, + status: "success", + }) + ) ) - ) - }) + }) + ) } /** @@ -806,18 +860,16 @@ export default class RepoCheckerService { } outputHumanReadableErrors(repoName: string) { - const csvData = fs.readFileSync( - path.join(SITE_CHECKER_REPO_PATH, repoName, REPO_LOG), - "utf8" + return ResultAsync.fromPromise( + this.generateRepoErrorsFromCsv(repoName), + (err) => { + logger.error(`SiteCheckerError: Error reading errors from csv: ${err}`) + return new SiteCheckerError(`Error reading errors from csv: ${err}`) + } ) - - const report: string[] = [] - - Papa.parse(csvData, { - header: true, - complete(results) { - results.data.forEach((row) => { - if (!isRepoError(row)) return + .andThen((errors) => { + const report: string[] = [] + errors.forEach((row) => { if (row.type === "broken-image") { report.push(`Broken image: ${row.linkToAsset}`) report.push(`Edit page: ${row.viewablePageInCms}`) @@ -843,14 +895,25 @@ export default class RepoCheckerService { report.push("\n") } }) - }, - }) - // write report to file - fs.writeFileSync( - path.join(SITE_CHECKER_REPO_PATH, repoName, "report.txt"), - report.join("\n") - ) + return ok(report) + }) + .andThen((report) => + ResultAsync.fromPromise( + fs.promises.writeFile( + path.join(SITE_CHECKER_REPO_PATH, repoName, "report.txt"), + report.join("\n") + ), + (err) => { + logger.error( + `SiteCheckerError: Error writing report to file: ${err}` + ) + return new SiteCheckerError( + `SiteCheckerError: Error writing report to file: ${err}` + ) + } + ) + ) } traverseDirectory( diff --git 
index 4d4bd9ef0..b6f432ff6 100644
--- a/src/services/utilServices/MailClient.ts
+++ b/src/services/utilServices/MailClient.ts
@@ -51,13 +51,14 @@ class MailClient {
     form.append("body", body)
     form.append("recipient", recipient)
     form.append("reply_to", "noreply@isomer.gov.sg")
-    attachments?.forEach((attachment) => {
-      form.append(
-        "attachments",
-        fs.readFileSync(attachment),
-        path.basename(attachment)
+    if (attachments) {
+      await Promise.all(
+        attachments?.map(async (attachment) => {
+          const content = await fs.promises.readFile(attachment)
+          form.append("attachments", content, path.basename(attachment))
+        })
       )
-    })
+    }
 
     try {
       const sendMailResponse = await axios.post(sendEndpoint, form, {
diff --git a/src/utils/fs-utils.ts b/src/utils/fs-utils.ts
new file mode 100644
index 000000000..d759b92c9
--- /dev/null
+++ b/src/utils/fs-utils.ts
@@ -0,0 +1,20 @@
+/* eslint-disable import/prefer-default-export */
+import * as fs from "fs/promises"
+
+import { ResultAsync, errAsync, okAsync } from "neverthrow"
+
+export function doesDirectoryExist(path: string): ResultAsync<boolean, Error> {
+  return ResultAsync.fromPromise(fs.stat(path), (e) => e)
+    .map((stat) => stat.isDirectory())
+    .orElse((e) => {
+      if (
+        typeof e === "object" &&
+        e !== null &&
+        "code" in e &&
+        e.code === "ENOENT"
+      ) {
+        return okAsync(false)
+      }
+      return errAsync(new Error(`Error occurred when checking directory: ${e}`))
+    })
+}
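Reviewer note: the snippet below is a minimal usage sketch (not part of the patch) showing how the new doesDirectoryExist helper is consumed, mirroring the await-then-isErr pattern used in ReposService above. The function name clearImagesIfPresent and the repoDir parameter are hypothetical, introduced only for illustration; the ENOENT-to-ok(false) mapping comes from the helper itself.

    import { doesDirectoryExist } from "@root/utils/fs-utils"

    // Hypothetical caller: decide whether a working directory can be operated on.
    async function clearImagesIfPresent(repoDir: string): Promise<void> {
      // Awaiting a neverthrow ResultAsync yields a Result<boolean, Error>.
      const result = await doesDirectoryExist(`${repoDir}/images`)
      if (result.isErr()) {
        // Only unexpected fs errors land here; a missing directory is ok(false).
        throw result.error
      }
      if (result.value) {
        // Directory exists and is a directory; safe to remove or clean it here,
        // e.g. await fs.promises.rm(`${repoDir}/images`, { recursive: true, force: true })
      }
    }

The design choice mirrored here is that "does not exist" is treated as a normal boolean outcome rather than an error, so callers only branch on isErr() for genuinely unexpected filesystem failures.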