From 463651ac42dce5a61fd2ef96b60526865de770de Mon Sep 17 00:00:00 2001 From: seaerchin <44049504+seaerchin@users.noreply.github.com> Date: Thu, 30 Mar 2023 16:11:23 +0800 Subject: [PATCH] release(0.18.0): merge to master (#671) * Chore(Site launch microservices):managing cloud environments (#657) * chore(serverless): remove offline packages + more meaningful naming for clarity * chore(deps): add deps + add in commands for cloud development * chore(readme): update readme * chore(docker compose): remove local emulation of lambdas (#666) we no longer use local stack * feat(identity): phase 2 (#509) * Feat/login flow (#477) * Feat: add SessionData class * Refactor: swap out auth middleware to use sessionData * Refactor: modify sessionData in rollbackRouteHandler * Feat: update middleware and auth services * Chore: swap out v1 routes Using a 1-1 swap here for v1 routes, since these are mostly outdated or due for refactor * Refactor: swap id used in logger * Feat: add handler to attach site name to sessionData * Chore: modify v2 routes and services to use sessionData * Chore: swap whoamiAuth to verifyJwt and useSiteAccessTokenIfAvailable with checkHasAccess * Test: add fixtures for new middleware * Fix: tests * Fix: allow e2e test user * Chore: update v1 endpoint * Fix: migrate auth middle to ts * Chore: rename to usersessiondata * Refactor: split sessionData into separate classes * Chore: replace sessionData * Chore: replace githubsessiondata * Chore: add jsdoc for classes * Chore: update routes and services to pass appropriate sessionData objects * Fix: tests * Fix: specify request types * Chore: remove unnecessary comment * Fix: simplify getGithubParamsWithSite * Feat/site member verification for email (#479) * Feat: add IsomerAdmins database table and migrations * Feat: add access token via interceptor if missing * Feat: add isomerAdminsService * Feat: add hasAccessToSite to usersService * Feat: shift site membership check to authorizationMiddlewareService * Chore: replace authMiddleware.checkHasAccess with authorizationMiddleware.checkIsSiteMember * Chore: migrate authmiddlewareservice to typescript * Fix: rename auth middleware to authentication middleware * Fix: move e2e_isomer_id into constants * Chore: add cookie types * Fix: more concise check for isSiteMember * FIx: rebase errors * Fix: remove unused identityAuthService dependency * Fix: rename AuthService import as identityAuthService * Nit: separate type definition * Feat/email login flow (#480) * build(deps): bump file-type from 16.5.3 to 16.5.4 (#475) Bumps [file-type](https://github.com/sindresorhus/file-type) from 16.5.3 to 16.5.4. - [Release notes](https://github.com/sindresorhus/file-type/releases) - [Commits](https://github.com/sindresorhus/file-type/compare/v16.5.3...v16.5.4) --- updated-dependencies: - dependency-name: file-type dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * fix: package.json & package-lock.json to reduce vulnerabilities (#476) The following vulnerabilities are fixed with an upgrade: - https://snyk.io/vuln/SNYK-JS-SEQUELIZE-2959225 * build(deps): bump vm2 from 3.9.5 to 3.9.7 (#350) Bumps [vm2](https://github.com/patriksimek/vm2) from 3.9.5 to 3.9.7. 
- [Release notes](https://github.com/patriksimek/vm2/releases) - [Changelog](https://github.com/patriksimek/vm2/blob/master/CHANGELOG.md) - [Commits](https://github.com/patriksimek/vm2/compare/3.9.5...3.9.7) --- updated-dependencies: - dependency-name: vm2 dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> * Chore: remove site links from description (#482) * Fix: update resource room (#481) * 0.10.0 * fix: remove unnecessary update step (#487) * 0.10.1 * Chore: update commit message to include user id * Feat: add login and verify endpoints * Fix: model relations and alias * Feat: add findSitesByUserId * Feat: add site retrieval for email and admin users * Fix: hasAccessToSite * Fix: update email/mobile by isomer id * Chore: update error message * Fix: await check for whitelist * Chore: add mockSessionData for email login * Fix: SiteService behaviour for email users with no whitelisted sites * Test: update sitesservice tests * Test: add new authservice tests and fix existing tests * Fix: update user model to allow null in github field * Fix: update test fixture * Fix: update user test suite * Chore: remove unused endpoint * Fix: rebase errors * Chore: remove unnecessary message in test * Chore: remove unnecessary userId field * Nit: rename variable * Refactor: shift site retrieval for email users into helper method * Chore: spacing and remove unused var * Fix: tests * Tests: add new authorizationMiddlewareService test * fix: remove resources_name and add support for url (#490) * fix: remove resources_name and add support for url * fix: display url parameter as domain but store with https scheme * fix: resolve failing tests * Chore: flip conditional * Refactor: shift order of getSites to make it easier to understand * Test: add new auth router tests * Feat: add integration tests for getSites * Fix: failing requests for getLastUpdated and getStagingUrl * Nit: add comment * Nit: test name and var name * chore(mocks/axios): remove extra stuff * test(sites.spec): refactor specs for clarity * Fix: update settings * Nit: update comment * Fix: tests Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Snyk bot Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> Co-authored-by: seaerchin Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Snyk bot Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> Co-authored-by: seaerchin * Fix: e2e bypass of authorization middleware Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Snyk bot Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> Co-authored-by: seaerchin * feat: collaborators (#510) * build(devdeps): add lodash types * fix(model): rectify db model definitions * refactor: add message param to ForbiddenError * feat: add CollaboratorsService * test: add tests for CollaboratorsService * feat: use CollaboratorService in authorization middleware * test: add tests for authorization middleware * feat: add CollaboratorsRouter * test: add tests for CollaboratorsRouter * feat(db-migration): change site_members role enum in the database * feat: modify authzMiddlewareService tests * fix: error in mock collaborators fixture * ref(services): migrate SitesService to TypeScript (#512) * ref(fixtures): convert repoInfo to typescript * ref(services): migrate SitesService 
to typescript * tests: update unit and integration tests for SitesService * ref(sites): migrate sites router to typescript * fix: revert back to using SessionData * fix: remove use of Bluebird and unused getSiteToken function * fix: use more accurate type * chore: remove unused variable * refactor(tests): migrate generic axios instance to __mocks__ * tests: use mockAxios directly instead of preparing an instance * feat(rr): db migrations (#515) * chore(infraservice): remove unused prop * chore(routes/auth): remove unused var * build(package): add useful command to undo migration * feat(migrations): add migrations for review requests fix(db migratino): update property names * chore(db migration): add migration to add status col to rr * chore(db migrations): add required columns for seq creation * feat: introduce a new site info API endpoint (#513) * ref(fixtures): convert repoInfo to typescript * ref(services): migrate SitesService to typescript * tests: update unit and integration tests for SitesService * ref(sites): migrate sites router to typescript * fix: revert back to using SessionData * fix: remove use of Bluebird and unused getSiteToken function * fix: use more accurate type * chore: remove unused variable * refactor(tests): migrate generic axios instance to __mocks__ * feat: introduce function to obtain latest commit details * feat: add function for obtaining a User by ID * feat: introduce a new site info API endpoint * tests: add partial tests for SitesService * tests: use mockAxios directly instead of preparing an instance * tests: fix SitesService unit tests to pass * chore: adjust constants to use SCREAMING_SNAKE_CASE * fix: add authorizationMiddleware to ensure user is member of site * chore: combine sessionData unpacking * fix: insert try-catch to handle errors from JSON.parse * chore: remove unnecessary check for undefined site * chore: return instead of throwing NotFoundError * fix: add assertion to ensure integrity of GitHubCommitData * fix: remove need for adding site name to sessionData * refactor: convert routes Sites.spec.js to TypeScript * refactor: redesign getUrlsOfSite to increase readability * fix: use correct endpoint to get latest commit data * test: add unit tests for GitHubService getLatestCommitOfBranch * fix: add stub for obtaining merge author details * fix: return a well-formatted response for known exceptions * test: enhance GitHubService test for all other error statuses * chore: rename isType function and return boolean directly * fix: create new siteUrls object instead of changing in-place * fix: handle case of null or undefined user email * chore: improve code style * tests: fix output of getStagingUrl * feat: add collaborators statistics API endpoint (#520) * ref(fixtures): convert repoInfo to typescript * ref(services): migrate SitesService to typescript * tests: update unit and integration tests for SitesService * ref(sites): migrate sites router to typescript * fix: revert back to using SessionData * fix: remove use of Bluebird and unused getSiteToken function * fix: use more accurate type * chore: remove unused variable * refactor(tests): migrate generic axios instance to __mocks__ * feat: introduce function to obtain latest commit details * feat: add function for obtaining a User by ID * feat: introduce a new site info API endpoint * tests: add partial tests for SitesService * tests: use mockAxios directly instead of preparing an instance * tests: fix SitesService unit tests to pass * chore: adjust constants to use SCREAMING_SNAKE_CASE * fix: 
add authorizationMiddleware to ensure user is member of site * chore: combine sessionData unpacking * fix: insert try-catch to handle errors from JSON.parse * chore: remove unnecessary check for undefined site * chore: return instead of throwing NotFoundError * fix: add assertion to ensure integrity of GitHubCommitData * fix: remove need for adding site name to sessionData * refactor: convert routes Sites.spec.js to TypeScript * refactor: redesign getUrlsOfSite to increase readability * feat: add collaborators statistics API endpoint * test: add unit tests for collaborators statistics * fix: return 404 instead of throwing an exception * tests: add test to check for 404 status * refactor: remove all usages of TokenStore and tests (#533) * feat(rr): add database models (#518) * chore(infraservice): remove unused prop * chore(routes/auth): remove unused var * build(package): add useful command to undo migration * feat(migrations): add migrations for review requests fix(db migratino): update property names * chore(db migration): add migration to add status col to rr * chore(db migrations): add required columns for seq creation * feat(db/models): add new db models for rr fix(db models): update db models fix(reviewmeat): update db model * chore(reviewrequest): update db model for seq * fix(reviewmeta): add annotation on db model * chore(reviewrequest.ts): add col to db model * fix(teardown): add enum dropping for tests teardown * feat: add new siteUrl API endpoint to get the production URL of site (#535) * Feat/notifications (#508) * Feat: add notification database model and update related models * Chore: add migrations Also adds id to site_members table for easier reference * Feat: add notificationService * Feat: add notificationUtils * Feat: add notifications router * Chore: initialise Notifications table and services * Fix: remove unused imports * Fix: change behaviour of quick retrieval Always returns only new notifications now, unless there are none, in which case it returns most recent 6 * Chore: remove unused imports * Refactor: findAll method * Chore: add notificationResponse type * Feat: add created_at sorting criteria * Fix: notification sorting order * Chore: add documentation for sort criteria * Fix: rebase errors * Fix: rebase errors for tests * feat(rr): services + routes (#519) * feat(services): add initial services for rr * feat(types): add github types * feat(reviewrequestservice): add features * chore(review): wip for review routes * feat(types): add new types * feat(reviewrequestservice): add impl for computing sha mappings * feat(usersservice): add nwe method to user service to retrieve site admins * feat(review): add route for creating review request * refactor(collaboratorsservice): refactor method api for clarity * feat(types): add more types * refactor(collaborators): fix typings and add more steuff to return * chore(routes): update authenticated routes * chore(review): refactor to use collaborators service * refactor(reviewrequestservice): update methods * chore(server): add init code * refactor(reviewmeta): updat eto use belongs to * feat(types): add more types * feat(requestnotfounderror): add new error type * feat(review): add methods for listing review requests and retrieval of rr * refactor(types/dto): update review types * refactor(rrservice): update enum type * feat(rrservice): add new method to merge rr * feat(review): add new route to merge rr * fix(collaborators): remove erroneous destructuring * fix(routes/review): add siteId prop * chore(review dto): add 
status * chore(requestrequestservice): remove old comment * fix(reviewrequestservice): changed some stuff to be optional * refactor(rr service): split retrieving db/github view into 2 methods * feat(rr service): add methods to close/approve rr * refactor(rr service): refactor merge rr method * chore(collaboratorsservice): remove extra typecasts * feat(rr): add new endpoint to update rr * chore(types): minor cleanup * feat(rr routes): add new routes for close and approve pr * chore(review): update to userwithsitesesiontoken * refactor(reviewrequestservice): migrate api calls into own file * refactor(authenticated): shift review router dpes to init function * fix(index): fixed faulty init * refactor(reviewrequestservice): add site to reviewreq object * fix(review.ts): add explicit bearer token to api call * refactor(rrservice): refactor to retrieve user from db * chore(settingsservice): remove extra console log * chore(github): remove extra `patch` property * chore(review): add logging * fix(server): update imports from rebase * Chore: Update src/routes/v2/authenticated/review.ts Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> * Chore: Update src/routes/v2/authenticated/review.ts Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> * feat(rr status): add rr status enum * chore(rr): update db model * refactor(rrservice): updat to use enum * chore(rrservice): fix commennt * chore(review): update error codes * chore(usersservice): rename hasAccess to getSiteMember * chore(usersservice): update method name * feat(rr): allow updating of admins (#539) * chore(server): add init code * chore(dto): removed trailing space on folder name * refactor(reviewrequestservice): update to remove title/desc from update api * chore(collaborators): update import * chore(routes/review): updaterr api Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> * feat: add model and migration for review_request_views table (#521) * feat: add model and migration for review_request_views table * chore: rename migration * fix: add missing association with ReviewRequest model * fix: revert adding association in ReviewRequest model * fix: missing @Column for primary and foreign key * fix: add new database table to sequelize * feat: add review request views API endpoint and functions (#532) * feat: add review request views API endpoint and functions * fix: adjust to use Promise.all to allow concurrent creations * chore: adjust naming of variable to be more reflective of state * Expose new API endpoint to update lastViewedAt timestamp * fix(server): removed extra db assoc * fix(formsgsitecreation): removed extra prop * Fix: reviewRequestId instead of reviewId * fix(server.js): add reviewrequestview init * feat: add endpoint to mark a review request as viewed (#550) * feat: add endpoint to mark a review request as viewed * fix: use upsert for updateReviewRequestLastViewedAt * Feat/comments (#534) * Feat: add methods for retrieving comments from github * Chore: add types * Feat: add comments methods to reviewRequestService * Feat: add comments routes * fix: check for properly formatted comments * Chore: remove incorrect comments * Fix: remove error return type * Fix: add logging if site not found * Feat: swap use of email in github commit to userid * Fix: response type * Fix: rename method and add github comment type * fix: compute the number of new comments to show (#549) * fix: compute the number of new comments to show * chore: adjust naming of variable and 
structure of code * chore: split getting number of new comments into 2 lines Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> * feat(rr): adds be for unapproval of rr (#540) * feat(rr): delete rr approval * chore(review): update numbering and logging * chore(review): update to send rather than empty json * fix(rrservice): update to use enum * fix: use review request requestor email for production commit email (#552) * fix: include full model in order by due to presence of alias (#554) * fix(server): swap order for auth to avoid nemspace conflicts * fix(review): allow rr creation by users of the site (#556) * tests: add remaining tests for identity features in SitesService (#566) * chore: define more specific constants for source of URLs * chore: move siteUrls type to repoInfo * tests: add remaining tests for identity features * tests: add spy method to check if extractAuthorEmail was called * chore(collaborators): update typings (#568) * chore(collaborators): update typings to be accurate * chore(collaborators): update typings * tests: add unit tests for ReviewRequestService (#569) * chore: remove TODO comment as it is already done * fix: should get the count of unread comments instead * tests: add ReviewRequestService unit tests * chore: move tests back to identity services * chore: remove unused variable * chore: adjust naming of test to be clearer on the situation * ref(fixtures): allow generating router for different user types (#574) * ref(fixtures): allow generating router for different user types * fix: switch to specifying the siteName directly * feat: allow adding a default user session * Fix/reference sitename from repo (#577) * Fix: use name from Repo instead of Site * Fix: tests * Fix: run tests sequentially * Fix: remove required from includes * Feat/add middleware for verifying email user (#573) * Feat: add middleware for email user * feat: add email verification middleware to collaborators * tests: add integration tests for review requests and comments (#576) * fix: force tests to run sequentially * fix: reset the state of the tables before running the tests * feat: add fixtures used for review requests * fix: await results from IdentityService * tests: add partial integration tests for review requests * chore: switch site name to use repo name instead * chore: switch to using variable for file name * chore: standardise the response codes for non-happy paths * fix: add additional sanity checks for comments-related features * tests: add remaining integration tests for review requests * fix: add methods to generate router for default values * chore: rename test to be more reflective of its purpose * fix: add missing return and await when updating database * chore: add comment on the need for Table.sync * fix(collaborators): add required middleware (#586) * fix(collaborators): add required middleware * chore(review): add missing await * tests: add unit tests for review requests routes (#582) * feat(rr): blob diffs (#575) * feat(review): add service methods to get raw blob from gh * feat(review): add route to retrieve blob diff * chore(review): add comment for head refs at present * fix(review): update types, fix header and remove extra await * Feat/notifications tests (#514) * Feat: add notificationService tests * Test: add Notification router tests * Test: add integration test for notifications * Fix: notification tests to work with new behaviour * Chore: swap constants to SCREAMING_SNAKE_CASE * Chore: fix test description * Chore: add comments * Chore: 
modify test names * Fix: cleanup tables between tests * Feat/generic notification creation (#523) * Feat: move notifications router to authenticated subrouter * Feat: modify create notification arguments * Feat: add notification middleware to handle edit notifications * Fix: allow next on routeHandler * Feat: add creatnotification middleware to authenticatesSitesRouter * Chore: add notification types * Feat: add notification changes when reviews are modified * Fix: swap order of subrouters * Fix: review request router dependencies * Fix: time period for updating notification * Fix: updating createdAt * Fix: tests * Chore: swap to promise.all * fix: rebase errors * Fix: add jsdoc * Feat: add link to notificationHandler * Fix: swap to promise.all and add links * fix(review): fixed triggering event for request approved notif * chore(notifications.spec): added correct ports * Fix: update message on update notification * Fix: notification integration tests * Feat: add generic notification handler tests (#572) * Feat: add generic notification handler tests * Fix: router initialisation and cleanup * fix: update github actions to use runInBand * Fix: update tests * Fix: reset database tables before integration tests * Chore: modify imports * Nit: test comment spelling Co-authored-by: Kishore <42832651+kishore03109@users.noreply.github.com> * Nit: comments and change var names Co-authored-by: Kishore <42832651+kishore03109@users.noreply.github.com> Co-authored-by: seaerchin Co-authored-by: Kishore <42832651+kishore03109@users.noreply.github.com> * Vapt: merge back to tracking (#647) * fix(notificationonedithandler): add check for email user * fix(review.ts): update routes access control (#589) * fix: prevent caching of backend API resp (#616) * fix: sanitize file content (#591) * feat: sanitize content via markdown utils * deps: use isomorphic-dompurify instead removes the need to declare a virtual DOM in node * feat: add sanitizedYaml helpers * feat: use sanitizedYaml helpers in codebase * test: add tests for sanitizedYaml helpers * chore: fix silly line break issues * test: add tests for markdown sanitization utils * fix: add express import that was removed by accident * chore: specify return types in yaml helpers * fix: make sanitizeYamlParse return type more specific * fix: hide contactNumber (#584) * Fix: query for site through repo (#630) * Fix/make login response indistinguishable (#624) * feat: add extra logging for mail failure * fix: always return 200 when attempting to retrieve otp * chore: update logging for sms * Fix: use logger.error * feat: Added virus scan functionality for file upload (VAPT finding) (#631) * feat: Added virus scan functionality for file/image upload using Cloudmersive API * Fixed lint and naming convention issue * uncomment the medianameChecks lines * Addressed PR comments: added more info/error logging and consistent message * Addressed PR comments: added check for Cloudmersive API key, corrected variable naming convention * Removed unused declaration of schema * auto-formatted by prettier and fixed formatting issue * chore: fix formatting on package-lock --------- Co-authored-by: Alexander Lee * Feat/swap jwt to session (#619) * Chore: install new dependencies * Chore: add migration * Feat: add session middleware * feat: replace jwt with session * feat: update middleware * feat: update auth routes * chore: update method names * Fix: tests * chore: update .env-example * chore: rename session middleware * fix: use lodash isempty * fix: .env-example * chore: add 
logging to login and logout endpoints * Fix: remove log on logout Cookie may no longer exist * fix: tests * chore: fix rebase errors * Feat/otp mechanism (#636) * feat: adding bcrypt as dependency * feat!: adding otp table and migration * feat: logic complete for otp service * test: fix auth service tests * fix: user service tests * feat: fix tests and improve checks * fix: package-lock version formatting * feat: adding env vars to example * fix: remove console log * fix: adding env vars to .env.test * fix: parsed env vars with check * fix: simplify find logic * fix: remove unnecessary null check * fix: throw instead of return * test: adding more tests for otp mechanism * test: add more cases, improve existing cases * feat: simplify methods without enums * fix: auth service otp tests * Feat/throttling (#641) * build(package): install rate limiting package * feat(auth): add new rate limiter service and use it in auth router * test(auth.spec): add skeleton for test cases for auth * test(auth.spec): add tests for rate limiting * refactor(ratelimiter.spec): shift to own test file * chore(server): add trust proxy to our rate limiter * refactor(ratelimiter): read from env var * chore(auth.spec): remove unused import * test(sitesservice.spec): update where condition * fix(mediafileservice): revert api; fixed tests * Chore: remove temp ip endpoint * Fix: remove set number of max hops * Fix: remove unused import * chore: remove comment * chore: change 1 to true --------- Co-authored-by: seaerchin * feat: add gitguardian hook and update readme (#642) * chore: use trust proxy * feat: add gitguardian hook and readme * fix: remove pre-commit config * fix: fix nits * fix: add ggshield cache files to gitignore * feat: use env for gitguardian * chore: update readme and env example --------- Co-authored-by: Alexander Lee * fix: wrong constant name used in test (#645) --------- Co-authored-by: seaerchin Co-authored-by: seaerchin <44049504+seaerchin@users.noreply.github.com> Co-authored-by: Preston Lim Co-authored-by: Qilu Xie Co-authored-by: Harish Co-authored-by: Harish * Fix: rebase errors * Fix: error message for review request * Fix: tests * feat: use convict for env vars (#646) * feat: add convict as dependency * feat: config wip * feat: add convict schema with defaults and validation * feat: add config alias and update dependencies * feat: replace process.env with config.get (wip) * fix: use import over require * feat: wip to use config * feat: add cloudmersive key as env * feat: wip to use config * feat: update config properties * feat: update imports and use config * chore: update env.test and imports * chore: standardise env * chore: remove dotenv * chore: format eslintrc * fix: rename convict format and env enums * chore: remove DOMAIN_WHITELIST from env * chore: revert trust proxy change * fix: cookie domain to be enums * feat: update to use config * fix: remove checks for env in logic * fix: make gitGuardian optional env * feat: add staging as option for node env * chore(app): chagne to import syntax * fix: remove string coercing for env key * chore: remove check for undefine site create form key env * chore: update package-lock --------- Co-authored-by: seaerchin * fix: make default empty strings (#658) * Fix/convict fixes (#660) * fix: make site launch vars optional, remove redundant ones * fix: remove duplicate express-session in package json * Chore/change node env of local dev to expect dev (#659) * chore: update references to node_env * Fix: remove .toLowerCase when retrieving config * 
Chore: add vapt to accepted application environments * Refactor: use isSecure * chore: replace IS_LOCAL_DEV with IS_DEV * Fix: change isSecure to variable instead of func (#664) * Fix/convict fixes (#663) * fix: update database config to use process.env * feat: strengthen validation for number check * fix: add try-catch to convict validate to prevent hard failure on deployment * fix: rename to use whole number * fix: console log convict err * fix: update check for whole to natural number * fix: rename back to positive number * fix: add radix param * chore(package-lock): installed using old npm --------- Co-authored-by: Alexander Lee Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Snyk bot Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> Co-authored-by: Preston Lim Co-authored-by: Kishore <42832651+kishore03109@users.noreply.github.com> Co-authored-by: Qilu Xie Co-authored-by: Harish Co-authored-by: Harish * 0.18.0 --------- Co-authored-by: Alexander Lee Co-authored-by: Kishore <42832651+kishore03109@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Snyk bot Co-authored-by: Hsu Zhong Jun <27919917+dcshzj@users.noreply.github.com> Co-authored-by: Preston Lim Co-authored-by: Qilu Xie Co-authored-by: Harish Co-authored-by: Harish --- .env-example | 18 +- .env.test | 44 +- .eslintrc.json | 239 +- .github/workflows/ci.yml | 2 +- .gitignore | 1 + .husky/pre-commit | 1 + CHANGELOG.md | 19 +- README.md | 31 + docker-compose.yml | 22 - jest.config.js | 1 + microservices/README.md | 15 + microservices/package-lock.json | 227 +- microservices/package.json | 10 +- microservices/serverless.yml | 18 +- package-lock.json | 5689 ++++++----------- package.json | 21 +- src/__mocks__/axios.ts | 3 - src/bootstrap/index.ts | 31 +- src/classes/Collection.js | 27 +- src/classes/Config.js | 23 +- src/classes/Directory.js | 6 +- src/classes/File.js | 6 +- src/classes/GithubSessionData.ts | 24 + src/classes/NetlifyToml.js | 6 +- src/classes/Resource.js | 13 +- src/classes/ResourceRoom.js | 45 +- src/classes/Settings.js | 22 +- src/classes/UserSessionData.ts | 54 + src/classes/UserWithSiteSessionData.ts | 27 + src/classes/index.ts | 3 + src/config/config.ts | 348 + src/constants/constants.ts | 41 + src/database/config.js | 12 +- .../20220726094614-create-isomer-admin.js | 36 + ...03091224-change-users-github-allow-null.js | 23 + .../20220811070630-change-role-enum.js | 59 + ...6081632-change-primary-key-site-members.js | 80 + .../20220926081632-create-notifications.js | 80 + .../20221003052424-review-request-creation.js | 43 + .../20221003123422-review-meta-creation.js | 46 + .../20221003130006-reviewer-creation.js | 43 + .../20221007124138-create-review-status.js | 30 + ...21012064037-create-review-request-views.js | 59 + .../migrations/20230125033437-add-sessions.js | 28 + .../migrations/20230214055456-create-otps.js | 49 + src/database/models/IsomerAdmin.ts | 32 + src/database/models/Notification.ts | 88 + src/database/models/Otp.ts | 61 + src/database/models/ReviewMeta.ts | 44 + src/database/models/ReviewRequest.ts | 69 + src/database/models/ReviewRequestView.ts | 56 + src/database/models/Reviewers.ts | 16 + src/database/models/Site.ts | 8 +- src/database/models/SiteMember.ts | 26 +- src/database/models/User.ts | 17 +- src/database/models/index.ts | 7 + src/errors/ForbiddenError.js | 4 +- src/errors/RequestNotFoundError.ts | 10 + src/fixtures/app.js | 18 - 
src/fixtures/app.ts | 156 + src/fixtures/github.ts | 139 + src/fixtures/identity.ts | 181 +- src/fixtures/markdown-fixtures.ts | 48 + src/fixtures/notifications.ts | 52 + src/fixtures/{repoInfo.js => repoInfo.ts} | 30 +- src/fixtures/review.ts | 89 + src/fixtures/sessionData.ts | 73 + src/fixtures/sites.ts | 132 + src/fixtures/users.ts | 55 + src/fixtures/yaml-fixtures.ts | 65 + .../NotificationOnEditHandler.spec.ts | 239 + src/integration/Notifications.spec.ts | 504 ++ src/integration/Reviews.spec.ts | 1882 ++++++ src/integration/Sites.spec.ts | 248 + src/integration/Users.spec.ts | 308 +- src/logger/logger.js | 8 +- src/middleware/__tests__/authorization.ts | 111 + src/middleware/apiLogger.js | 7 +- src/middleware/auth.js | 49 - src/middleware/authentication.ts | 47 + src/middleware/authorization.ts | 79 + src/middleware/index.ts | 77 +- src/middleware/notificationOnEditHandler.ts | 78 + src/middleware/routeHandler.js | 17 +- src/routes/formsgSiteCreation.ts | 12 +- src/routes/v1/auth.js | 58 +- src/routes/v1/authenticated/index.js | 6 +- src/routes/v1/authenticated/sites.js | 21 +- .../v1/authenticatedSites/collectionPages.js | 27 +- .../v1/authenticatedSites/collections.js | 26 +- src/routes/v1/authenticatedSites/directory.js | 3 +- src/routes/v1/authenticatedSites/documents.js | 21 +- src/routes/v1/authenticatedSites/folders.js | 19 +- src/routes/v1/authenticatedSites/homepage.js | 6 +- src/routes/v1/authenticatedSites/images.js | 21 +- src/routes/v1/authenticatedSites/index.js | 13 +- .../v1/authenticatedSites/mediaSubfolder.js | 11 +- .../v1/authenticatedSites/navigation.js | 16 +- .../v1/authenticatedSites/netlifyToml.js | 3 +- src/routes/v1/authenticatedSites/pages.js | 30 +- .../v1/authenticatedSites/resourcePages.js | 18 +- .../v1/authenticatedSites/resourceRoom.js | 12 +- src/routes/v1/authenticatedSites/resources.js | 15 +- src/routes/v1/authenticatedSites/settings.js | 6 +- src/routes/v2/__tests__/Auth.spec.js | 72 +- src/routes/v2/auth.js | 72 +- .../__tests__/NetlifyToml.spec.js | 9 +- .../__tests__/Notifications.spec.ts | 113 + .../v2/authenticated/__tests__/Sites.spec.js | 113 - .../v2/authenticated/__tests__/Sites.spec.ts | 143 + .../__tests__/collaborators.spec.ts | 229 + .../v2/authenticated/__tests__/review.spec.ts | 1324 ++++ src/routes/v2/authenticated/collaborators.ts | 169 + src/routes/v2/authenticated/index.js | 48 +- src/routes/v2/authenticated/netlifyToml.js | 8 +- src/routes/v2/authenticated/notifications.ts | 105 + src/routes/v2/authenticated/review.ts | 1297 ++++ src/routes/v2/authenticated/sites.js | 79 - src/routes/v2/authenticated/sites.ts | 144 + src/routes/v2/authenticated/users.ts | 24 +- .../__tests__/CollectionPages.spec.js | 24 +- .../__tests__/Collections.spec.js | 98 +- .../__tests__/ContactUs.spec.js | 10 +- .../__tests__/Homepage.spec.js | 16 +- .../__tests__/MediaCategories.spec.js | 58 +- .../__tests__/MediaFiles.spec.js | 18 +- .../__tests__/Navigation.spec.js | 10 +- .../__tests__/ResourceCategories.spec.js | 58 +- .../__tests__/ResourcePages.spec.js | 14 +- .../__tests__/ResourceRoom.spec.js | 19 +- .../__tests__/UnlinkedPages.spec.js | 20 +- .../v2/authenticatedSites/collectionPages.js | 172 +- .../v2/authenticatedSites/collections.js | 104 +- src/routes/v2/authenticatedSites/contactUs.js | 20 +- src/routes/v2/authenticatedSites/homepage.js | 27 +- src/routes/v2/authenticatedSites/index.js | 13 +- .../v2/authenticatedSites/mediaCategories.js | 57 +- .../v2/authenticatedSites/mediaFiles.js | 72 +- .../v2/authenticatedSites/navigation.js | 
20 +- .../authenticatedSites/resourceCategories.js | 53 +- .../v2/authenticatedSites/resourcePages.js | 123 +- .../v2/authenticatedSites/resourceRoom.js | 41 +- src/routes/v2/authenticatedSites/settings.js | 26 +- .../v2/authenticatedSites/unlinkedPages.js | 68 +- src/server.js | 153 +- src/services/api/AxiosInstance.ts | 20 +- .../configServices/NetlifyTomlService.js | 8 +- .../configServices/SettingsService.js | 40 +- .../__tests__/NetlifyTomlService.spec.js | 17 +- .../__tests__/SettingsService.spec.js | 49 +- src/services/db/GitHubService.js | 161 +- .../db/__tests__/GitHubService.spec.js | 177 +- src/services/db/review.ts | 146 + .../directoryServices/BaseDirectoryService.js | 88 +- .../CollectionDirectoryService.js | 40 +- .../MediaDirectoryService.js | 64 +- .../ResourceDirectoryService.js | 29 +- .../ResourceRoomDirectoryService.js | 41 +- .../SubcollectionDirectoryService.js | 8 +- .../UnlinkedPagesDirectoryService.js | 8 +- .../__tests__/BaseDirectoryService.spec.js | 97 +- .../CollectionDirectoryService.spec.js | 53 +- .../__tests__/MediaDirectoryService.spec.js | 47 +- .../ResourceDirectoryService.spec.js | 52 +- .../ResourceRoomDirectoryService.spec.js | 49 +- .../SubcollectionDirectoryService.spec.js | 15 +- .../MdPageServices/CollectionPageService.js | 28 +- .../MdPageServices/ContactUsPageService.js | 14 +- .../MdPageServices/HomepagePageService.js | 8 +- .../MdPageServices/MediaFileService.js | 73 +- .../MdPageServices/ResourcePageService.js | 25 +- .../SubcollectionPageService.js | 36 +- .../MdPageServices/UnlinkedPageService.js | 22 +- .../__tests__/MediaFileService.spec.js | 34 +- .../YmlFileServices/CollectionYmlService.js | 72 +- .../YmlFileServices/ConfigYmlService.js | 13 +- .../YmlFileServices/FooterYmlService.js | 17 +- .../YmlFileServices/NavYmlService.js | 36 +- .../__tests__/CollectionYmlService.spec.js | 33 +- .../__tests__/NavYmlService.spec.js | 9 +- src/services/identity/AuthService.ts | 12 +- src/services/identity/CollaboratorsService.ts | 292 + src/services/identity/DeploymentClient.ts | 5 +- src/services/identity/IsomerAdminsService.ts | 27 + src/services/identity/NotificationsService.ts | 241 + src/services/identity/OtpService.ts | 28 + src/services/identity/ReposService.ts | 4 +- src/services/identity/SitesService.ts | 442 +- src/services/identity/SmsClient.ts | 11 +- src/services/identity/TokenStore.ts | 37 - src/services/identity/UsersService.ts | 238 +- .../identity/__tests__/AuthService.spec.ts | 24 +- .../__tests__/CollaboratorsService.spec.ts | 545 ++ .../__tests__/NotificationsService.spec.ts | 211 + .../identity/__tests__/SitesService.spec.ts | 1379 +++- .../__tests__/SmsClient.spec.ts | 20 +- .../identity/__tests__/TokenStore.spec.ts | 48 - .../__tests__/TotpGenerator.spec.ts | 0 .../identity/__tests__/UsersService.spec.ts | 22 +- src/services/identity/index.ts | 60 +- src/services/infra/InfraService.ts | 7 +- .../AuthMiddlewareService.js | 144 - .../AuthenticationMiddlewareService.ts | 109 + .../AuthorizationMiddlewareService.ts | 99 + .../AuthorizationMiddlewareService.spec.ts | 139 + src/services/moverServices/MoverService.js | 20 +- src/services/review/ReviewRequestService.ts | 626 ++ .../__tests__/ReviewRequestService.spec.ts | 1151 ++++ src/services/utilServices/AuthService.js | 84 +- src/services/utilServices/MailClient.ts | 19 +- src/services/utilServices/RateLimiter.ts | 20 + src/services/utilServices/SitesService.js | 120 - .../__tests__/AuthService.spec.js | 117 +- .../utilServices/__tests__/MailClient.spec.ts | 4 +- 
.../__tests__/RateLimiter.spec.ts | 31 + .../__tests__/SitesService.spec.js | 149 - src/tests/database.ts | 14 + src/tests/teardown.ts | 31 + src/types/commitData.ts | 8 + src/types/configYml.ts | 4 + src/types/dto/error.ts | 3 + src/types/dto/review.ts | 65 + src/types/error.ts | 5 + src/types/express/session.d.ts | 8 + src/types/github.ts | 88 + src/types/repoInfo.ts | 23 + src/types/request.ts | 2 +- src/types/review.ts | 15 + src/types/siteInfo.ts | 8 + src/utils/__tests__/markdown-utils.spec.ts | 39 + src/utils/__tests__/yaml-utils.spec.ts | 28 + src/utils/auth-utils.js | 12 +- src/utils/file-upload-utils.js | 39 +- src/utils/jwt-utils.js | 8 +- src/utils/markdown-utils.js | 20 +- src/utils/mutex-utils.js | 12 +- src/utils/notification-utils.ts | 49 + src/utils/time-utils.ts | 2 + src/utils/utils.js | 4 +- src/utils/yaml-utils.ts | 11 + tsconfig.json | 1 + 241 files changed, 20490 insertions(+), 6465 deletions(-) create mode 100644 microservices/README.md create mode 100644 src/classes/GithubSessionData.ts create mode 100644 src/classes/UserSessionData.ts create mode 100644 src/classes/UserWithSiteSessionData.ts create mode 100644 src/classes/index.ts create mode 100644 src/config/config.ts create mode 100644 src/database/migrations/20220726094614-create-isomer-admin.js create mode 100644 src/database/migrations/20220803091224-change-users-github-allow-null.js create mode 100644 src/database/migrations/20220811070630-change-role-enum.js create mode 100644 src/database/migrations/20220926081632-change-primary-key-site-members.js create mode 100644 src/database/migrations/20220926081632-create-notifications.js create mode 100644 src/database/migrations/20221003052424-review-request-creation.js create mode 100644 src/database/migrations/20221003123422-review-meta-creation.js create mode 100644 src/database/migrations/20221003130006-reviewer-creation.js create mode 100644 src/database/migrations/20221007124138-create-review-status.js create mode 100644 src/database/migrations/20221012064037-create-review-request-views.js create mode 100644 src/database/migrations/20230125033437-add-sessions.js create mode 100644 src/database/migrations/20230214055456-create-otps.js create mode 100644 src/database/models/IsomerAdmin.ts create mode 100644 src/database/models/Notification.ts create mode 100644 src/database/models/Otp.ts create mode 100644 src/database/models/ReviewMeta.ts create mode 100644 src/database/models/ReviewRequest.ts create mode 100644 src/database/models/ReviewRequestView.ts create mode 100644 src/database/models/Reviewers.ts create mode 100644 src/errors/RequestNotFoundError.ts delete mode 100644 src/fixtures/app.js create mode 100644 src/fixtures/app.ts create mode 100644 src/fixtures/github.ts create mode 100644 src/fixtures/markdown-fixtures.ts create mode 100644 src/fixtures/notifications.ts rename src/fixtures/{repoInfo.js => repoInfo.ts} (57%) create mode 100644 src/fixtures/review.ts create mode 100644 src/fixtures/sessionData.ts create mode 100644 src/fixtures/sites.ts create mode 100644 src/fixtures/users.ts create mode 100644 src/fixtures/yaml-fixtures.ts create mode 100644 src/integration/NotificationOnEditHandler.spec.ts create mode 100644 src/integration/Notifications.spec.ts create mode 100644 src/integration/Reviews.spec.ts create mode 100644 src/integration/Sites.spec.ts create mode 100644 src/middleware/__tests__/authorization.ts delete mode 100644 src/middleware/auth.js create mode 100644 src/middleware/authentication.ts create mode 100644 
src/middleware/authorization.ts create mode 100644 src/middleware/notificationOnEditHandler.ts create mode 100644 src/routes/v2/authenticated/__tests__/Notifications.spec.ts delete mode 100644 src/routes/v2/authenticated/__tests__/Sites.spec.js create mode 100644 src/routes/v2/authenticated/__tests__/Sites.spec.ts create mode 100644 src/routes/v2/authenticated/__tests__/collaborators.spec.ts create mode 100644 src/routes/v2/authenticated/__tests__/review.spec.ts create mode 100644 src/routes/v2/authenticated/collaborators.ts create mode 100644 src/routes/v2/authenticated/notifications.ts create mode 100644 src/routes/v2/authenticated/review.ts delete mode 100644 src/routes/v2/authenticated/sites.js create mode 100644 src/routes/v2/authenticated/sites.ts create mode 100644 src/services/db/review.ts create mode 100644 src/services/identity/CollaboratorsService.ts create mode 100644 src/services/identity/IsomerAdminsService.ts create mode 100644 src/services/identity/NotificationsService.ts create mode 100644 src/services/identity/OtpService.ts delete mode 100644 src/services/identity/TokenStore.ts create mode 100644 src/services/identity/__tests__/CollaboratorsService.spec.ts create mode 100644 src/services/identity/__tests__/NotificationsService.spec.ts rename src/services/{utilServices => identity}/__tests__/SmsClient.spec.ts (65%) delete mode 100644 src/services/identity/__tests__/TokenStore.spec.ts rename src/services/{utilServices => identity}/__tests__/TotpGenerator.spec.ts (100%) delete mode 100644 src/services/middlewareServices/AuthMiddlewareService.js create mode 100644 src/services/middlewareServices/AuthenticationMiddlewareService.ts create mode 100644 src/services/middlewareServices/AuthorizationMiddlewareService.ts create mode 100644 src/services/middlewareServices/__tests__/AuthorizationMiddlewareService.spec.ts create mode 100644 src/services/review/ReviewRequestService.ts create mode 100644 src/services/review/__tests__/ReviewRequestService.spec.ts create mode 100644 src/services/utilServices/RateLimiter.ts delete mode 100644 src/services/utilServices/SitesService.js create mode 100644 src/services/utilServices/__tests__/RateLimiter.spec.ts delete mode 100644 src/services/utilServices/__tests__/SitesService.spec.js create mode 100644 src/types/commitData.ts create mode 100644 src/types/configYml.ts create mode 100644 src/types/dto/error.ts create mode 100644 src/types/dto/review.ts create mode 100644 src/types/express/session.d.ts create mode 100644 src/types/github.ts create mode 100644 src/types/repoInfo.ts create mode 100644 src/types/review.ts create mode 100644 src/types/siteInfo.ts create mode 100644 src/utils/__tests__/markdown-utils.spec.ts create mode 100644 src/utils/__tests__/yaml-utils.spec.ts create mode 100644 src/utils/notification-utils.ts create mode 100644 src/utils/time-utils.ts create mode 100644 src/utils/yaml-utils.ts diff --git a/.env-example b/.env-example index fd7a6b36a..985336c7b 100644 --- a/.env-example +++ b/.env-example @@ -1,9 +1,10 @@ export CLIENT_ID="" export CLIENT_SECRET="" -export REDIRECT_URI="http://localhost:8081/auth" +export REDIRECT_URI="http://localhost:8081/v1/auth" export NODE_ENV="LOCAL_DEV" export COOKIE_DOMAIN="localhost" export AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS=3600000 +export SESSION_SECRET=mysessionsecretblah export JWT_SECRET=mysecretblah export ENCRYPTION_SECRET=anothersecretblah export FRONTEND_URL='http://localhost:8081' @@ -11,6 +12,9 @@ export GITHUB_ORG_NAME="isomerpages" export 
GITHUB_BUILD_ORG_NAME="opengovsg" export GITHUB_BUILD_REPO_NAME="isomer-build" export MUTEX_TABLE_NAME="" +export MAX_NUM_OTP_ATTEMPTS=5 +export OTP_EXPIRY=900000 +export MUTEX_TABLE_NAME="mutex-table" # GitHub access token to create repo export SYSTEM_GITHUB_TOKEN="" @@ -36,7 +40,6 @@ export DB_ENABLE_LOGGING="" export LOCAL_SITE_ACCESS_TOKEN="" export OTP_SECRET="dummysecret" -export DOMAIN_WHITELIST=".gov.sg" # Email export POSTMAN_API_KEY="" @@ -48,3 +51,14 @@ export POSTMAN_SMS_CRED_NAME="" export DD_ENV="local" export DD_SERVICE="isomer" export DD_TAGS="service:isomer" + +# Cloudmersive +export CLOUDMERSIVE_API_KEY="" + +# GitGuardian +export GITGUARDIAN_API_KEY="" + +# SQS Queue +export INCOMING_QUEUE_URL="incoming" +export OUTGOING_QUEUE_URL="outgoing" +export SITE_LAUNCH_QUEUE_URL="site_launch" \ No newline at end of file diff --git a/.env.test b/.env.test index 473c9b6e6..aaf9bead2 100644 --- a/.env.test +++ b/.env.test @@ -10,20 +10,58 @@ export FRONTEND_URL="http://localhost:3000" export GITHUB_ORG_NAME="isomerpages" export GITHUB_BUILD_ORG_NAME="opengovsg" export GITHUB_BUILD_REPO_NAME="isomer-build" +export MUTEX_TABLE_NAME="mutex-table" export ISOMERPAGES_REPO_PAGE_COUNT=3 +export MAX_NUM_OTP_ATTEMPTS=5 +export OTP_EXPIRY=900000 +export SESSION_SECRET=blahblah + +# GitHub access token to create repo +export SYSTEM_GITHUB_TOKEN="github_token" + +# FormSG keys +export SITE_CREATE_FORM_KEY="site_form_key" + +# Required to connect to DynamoDB +export AWS_ACCESS_KEY_ID="abc123" +export AWS_SECRET_ACCESS_KEY="xyz123" + +# Required to run end-to-end tests +export E2E_TEST_REPO="e2e-test-repo" +export E2E_TEST_SECRET="test" +export E2E_TEST_GH_TOKEN="test" # Database export DB_URI="postgres://isomer:password@localhost:54321/isomercms_test" export DB_MIN_POOL="1" export DB_MAX_POOL="10" -export DB_ENABLE_LOGGING="" +export DB_ENABLE_LOGGING="true" -export LOCAL_SITE_ACCESS_TOKEN="" +export LOCAL_SITE_ACCESS_TOKEN="dummy" export OTP_SECRET="dummysecret" -export DOMAIN_WHITELIST=".gov.sg" # Email export POSTMAN_API_KEY="some api key" # SMS export POSTMAN_SMS_CRED_NAME="isomer" + +# Amplify +export AWS_REGION="ap-southeast-1" +export AWS_ACCOUNT_NUMBER="random" + +# DataDog +export DD_ENV="local" +export DD_SERVICE="isomer" +export DD_TAGS="service:isomer" + +# Cloudmersive +export CLOUDMERSIVE_API_KEY="cloudmersive" + +# GitGuardian +export GITGUARDIAN_API_KEY="gitguardian" + +# SQS Queue +export INCOMING_QUEUE_URL="incoming" +export OUTGOING_QUEUE_URL="outgoing" +export SITE_LAUNCH_QUEUE_URL="site_launch" \ No newline at end of file diff --git a/.eslintrc.json b/.eslintrc.json index 330186b8f..2b9c26d6a 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,118 +1,129 @@ { - "env": { - "browser": true, - "commonjs": true, - "es2021": true, - "jest": true, - "jest/globals": true - }, - "extends": [ - "airbnb-base", - "prettier", - "plugin:import/typescript" - ], - "plugins": ["only-warn", "import", "jest"], - "parserOptions": { - "ecmaVersion": 12 - }, - "rules": { - "no-underscore-dangle": "off", - "class-methods-use-this": "off", - "import/no-unresolved": "error", - "import/extensions": ["warn", "never"], - "import/order": [ - "error", - { - "alphabetize": { - "order": "asc", - "caseInsensitive": true - }, - "newlines-between": "always", - "groups": ["builtin", "external", "internal", "parent", "sibling", "index", "object"], - "pathGroups": [{ - "pattern": "@logger/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@errors/**", - "group": "internal", - 
"position": "before" - }, - { - "pattern": "@middleware/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@classes/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@routes/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@utils/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@validators/**", - "group": "internal", - "position": "before" - }, - { - "pattern": "@constants/**", - "group": "internal", - "position": "before" - } - ] - } + "env": { + "browser": true, + "commonjs": true, + "es2021": true, + "jest": true, + "jest/globals": true + }, + "extends": ["airbnb-base", "prettier", "plugin:import/typescript"], + "plugins": ["only-warn", "import", "jest"], + "parserOptions": { + "ecmaVersion": 12 + }, + "rules": { + "no-underscore-dangle": "off", + "class-methods-use-this": "off", + "import/no-unresolved": "error", + "import/extensions": ["warn", "never"], + "import/order": [ + "error", + { + "alphabetize": { + "order": "asc", + "caseInsensitive": true + }, + "newlines-between": "always", + "groups": [ + "builtin", + "external", + "internal", + "parent", + "sibling", + "index", + "object" ], - "jest/no-identical-title": "error" - }, - "overrides": [ - { - "files": ["*.ts", "*.tsx"], - "parser": "@typescript-eslint/parser", - "extends": ["plugin:@typescript-eslint/recommended"] - } - ], - "settings": { - "import/parsers": { - "@typescript-eslint/parser": [".ts", ".tsx"] - }, - "import/resolver": { - "typescript": { - // always try to resolve types under `@types` directory even it doesn't contain any source code, like `@types/unist` - "alwaysTryTypes": true - }, - "node": { - "paths": ["src"], - "extensions": [".js", ".jsx", ".ts", ".tsx"] - }, - "alias": [ - ["@root", "."], - ["@classes", "./classes"], - ["@errors", "./errors"], - ["@logger", "./logger"], - ["@middleware", "./middleware"], - ["@routes", "./routes"], - ["@utils", "./utils"], - ["@loaders", "./loaders"], - ["@database", "./database"], - ["@services", "./services"], - ["@validators", "./validators"], - ["@fixtures", "./fixtures"], - ["@database", "./database"], - ["@constants", "./constants"] - ] - } + "pathGroups": [ + { + "pattern": "@config/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@logger/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@errors/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@middleware/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@classes/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@routes/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@utils/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@validators/**", + "group": "internal", + "position": "before" + }, + { + "pattern": "@constants/**", + "group": "internal", + "position": "before" + } + ] + } + ], + "jest/no-identical-title": "error" + }, + "overrides": [ + { + "files": ["*.ts", "*.tsx"], + "parser": "@typescript-eslint/parser", + "extends": ["plugin:@typescript-eslint/recommended"] + } + ], + "settings": { + "import/parsers": { + "@typescript-eslint/parser": [".ts", ".tsx"] }, - "globals": { - "Base64": true + "import/resolver": { + "typescript": { + // always try to resolve types under `@types` directory even it doesn't contain any source code, like `@types/unist` + "alwaysTryTypes": true + }, + "node": { + "paths": ["src"], + "extensions": [".js", ".jsx", 
".ts", ".tsx"] + }, + "alias": [ + ["@root", "."], + ["@config", "./config"], + ["@classes", "./classes"], + ["@errors", "./errors"], + ["@logger", "./logger"], + ["@middleware", "./middleware"], + ["@routes", "./routes"], + ["@utils", "./utils"], + ["@loaders", "./loaders"], + ["@database", "./database"], + ["@services", "./services"], + ["@validators", "./validators"], + ["@fixtures", "./fixtures"], + ["@database", "./database"], + ["@constants", "./constants"] + ] } -} \ No newline at end of file + }, + "globals": { + "Base64": true + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index af3359d29..f858b4531 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -71,7 +71,7 @@ jobs: key: ${{ runner.OS }}-node-${{ hashFiles('**/package-lock.json') }} - run: npm ci - run: npm run dev:services - - run: . .env.test && npx jest + - run: . .env.test && npx jest --runInBand - run: docker compose down gatekeep: diff --git a/.gitignore b/.gitignore index e31fe77b0..c53d70f7e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ build/ .DS_Store .step-functions-local/ .serverless/ +.cache_ggshield diff --git a/.husky/pre-commit b/.husky/pre-commit index 36af21989..edcec8ef0 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -2,3 +2,4 @@ . "$(dirname "$0")/_/husky.sh" npx lint-staged +source .env && ggshield secret scan pre-commit diff --git a/CHANGELOG.md b/CHANGELOG.md index 7039368ba..ecd4dff09 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,8 +4,17 @@ All notable changes to this project will be documented in this file. Dates are d Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). +#### [v0.18.0](https://github.com/isomerpages/isomercms-backend/compare/v0.17.0...v0.18.0) + +- feat(identity): phase 2 [`#509`](https://github.com/isomerpages/isomercms-backend/pull/509) +- chore(docker compose): remove local emulation of lambdas [`#666`](https://github.com/isomerpages/isomercms-backend/pull/666) +- Chore(Site launch microservices):managing cloud environments [`#657`](https://github.com/isomerpages/isomercms-backend/pull/657) +- 0.17.0 (to develop) [`#654`](https://github.com/isomerpages/isomercms-backend/pull/654) + #### [v0.17.0](https://github.com/isomerpages/isomercms-backend/compare/v0.16.0...v0.17.0) +> 16 March 2023 + - Chore: update pacakages [`#651`](https://github.com/isomerpages/isomercms-backend/pull/651) - build(deps-dev): bump sqlite3 from 5.0.8 to 5.1.5 [`#649`](https://github.com/isomerpages/isomercms-backend/pull/649) - fix(site creation): make rewrites to redirects [`#644`](https://github.com/isomerpages/isomercms-backend/pull/644) @@ -132,15 +141,15 @@ Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog). 
- Feat/add access token table [`#441`](https://github.com/isomerpages/isomercms-backend/pull/441) - Refactor/consolidate misc GitHub requests [`#440`](https://github.com/isomerpages/isomercms-backend/pull/440) - 0.5.0 [`#434`](https://github.com/isomerpages/isomercms-backend/pull/434) - -#### [v0.5.0](https://github.com/isomerpages/isomercms-backend/compare/v0.4.0...v0.5.0) - -> 21 April 2022 - - Fix/migrate script [`#432`](https://github.com/isomerpages/isomercms-backend/pull/432) - Feat/move whitelist into database [`#422`](https://github.com/isomerpages/isomercms-backend/pull/422) - Refactor/use test fixture [`#430`](https://github.com/isomerpages/isomercms-backend/pull/430) - Chore: remove duplicate validation in User model [`#429`](https://github.com/isomerpages/isomercms-backend/pull/429) + +#### [v0.5.0](https://github.com/isomerpages/isomercms-backend/compare/v0.4.0...v0.5.0) + +> 14 April 2022 + - build(deps-dev): bump eslint-config-prettier from 8.1.0 to 8.5.0 [`#371`](https://github.com/isomerpages/isomercms-backend/pull/371) - build(deps): bump async from 3.2.0 to 3.2.3 [`#424`](https://github.com/isomerpages/isomercms-backend/pull/424) - chore(mergify): change to lower case [`#425`](https://github.com/isomerpages/isomercms-backend/pull/425) diff --git a/README.md b/README.md index a9fde9e22..c33393293 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,37 @@ 3. next, run `npm i` to ensure that you have all the required packages 4. run `npm run dev` +## Setup + +1. Ensure pre-commit hooks are set up for safe commits. See the section below on "Setting Up Git Guardian" +2. Ensure node 14 is installed. Install and use nvm to manage multiple node versions. +3. Run `npm i` to install required packages +4. Ensure [Docker](https://www.docker.com/products/docker-desktop/) is installed +5. Run `npm run dev:services` to bring up the docker containers +6. Run `npm run dev` to start the server + +## Setting Up Git Guardian + +1. Install GitGuardian + +``` +brew install gitguardian/tap/ggshield +``` + +2.
Add the API Key to your `.env` file + +``` +# Service API key from GitGuardian account +export GITGUARDIAN_API_KEY=abc123 +``` + +Notes: + +Only if necessary: + +- To skip all pre-commit hooks, use `$ git commit -m "commit message" -n` +- To skip only GitGuardian's hook, use `$ SKIP=ggshield git commit -m "commit message"` + ## E2E Tests To run the E2E tests successfully, you will need to define the following environment variables: diff --git a/docker-compose.yml b/docker-compose.yml index 262afeaa3..db8902eff 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,27 +21,5 @@ services: # use a different port to avoid blocking dev environment when running tests - "54321:5432" - # Runs the AWS-provided emulator for step functions - stepFunctionsLocal: - image: amazon/aws-stepfunctions-local - environment: - AWS_SECRET_KEY: blah - AWS_ACCESS_KEY_ID: bleh - AWS_ACCOUNT_ID: 101010101010 - AWS_DEFAULT_REGION: us-east-1 - LAMBDA_ENDPOINT: http://host.docker.internal:3002 - ports: - - 8083:8083 - - # Start local stack - localstack: - image: localstack/localstack - environment: - - AWS_DEFAULT_REGION=ap-southeast-1 - - EDGE_PORT=4566 - - SERVICES=sqs - ports: - - "4566-4599:4566-4599" - volumes: isomercms_data: diff --git a/jest.config.js b/jest.config.js index 44c385eeb..b749e4eb4 100644 --- a/jest.config.js +++ b/jest.config.js @@ -7,6 +7,7 @@ module.exports = { "^@root/(.*)": "<rootDir>/$1", "^@classes/(.*)": "<rootDir>/classes/$1", "^@errors/(.*)": "<rootDir>/errors/$1", + "^@config/(.*)": "<rootDir>/config/$1", "^@logger/(.*)": "<rootDir>/logger/$1", "^@middleware/(.*)": "<rootDir>/middleware/$1", "^@routes/(.*)": "<rootDir>/routes/$1", diff --git a/microservices/README.md b/microservices/README.md new file mode 100644 index 000000000..4e4709af7 --- /dev/null +++ b/microservices/README.md @@ -0,0 +1,15 @@ +## Microservices + +This folder contains the microservices needed for the site launch process. + +We intend to move away from serverless soon in favour of Pulumi. In the interim, here are the prerequisites for deploying into the cloud. + +1. Run `npm install -g serverless` + +By the very nature of cloud development, everyone will have access to the same shared resource.
If you wish to develop in an isolated environment, please use: + +`npm run deploy:dev -- --stage ` + +After development, please clean up by using: + +`npm run destroy:dev -- --stage ` diff --git a/microservices/package-lock.json b/microservices/package-lock.json index 9cb35b9e0..7fd751ec5 100644 --- a/microservices/package-lock.json +++ b/microservices/package-lock.json @@ -2207,6 +2207,45 @@ "kuler": "^2.0.0" } }, + "@hapi/address": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@hapi/address/-/address-2.1.4.tgz", + "integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==", + "dev": true + }, + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==", + "dev": true + }, + "@hapi/hoek": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", + "integrity": "sha512-yN7kbciD87WzLGc5539Tn0sApjyiGHAJgKvG9W8C7O+6c7qmoQMfVs0W4bX17eqz6C78QJqqFrtgdK5EWf6Qow==", + "dev": true + }, + "@hapi/joi": { + "version": "15.1.1", + "resolved": "https://registry.npmjs.org/@hapi/joi/-/joi-15.1.1.tgz", + "integrity": "sha512-entf8ZMOK8sc+8YfeOlM8pCfg3b5+WZIKBfUaaJT8UsjAAPjartzxIYm3TIbjvA4u+u++KbcXD38k682nVHDAQ==", + "dev": true, + "requires": { + "@hapi/address": "2.x.x", + "@hapi/bourne": "1.x.x", + "@hapi/hoek": "8.x.x", + "@hapi/topo": "3.x.x" + } + }, + "@hapi/topo": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/@hapi/topo/-/topo-3.1.6.tgz", + "integrity": "sha512-tAag0jEcjwH+P2quUfipd7liWCNX2F8NvYjQp2wtInsZxnMlypdw0FtAOLxtvvkO+GSRRbmNi8m/5y42PQJYCQ==", + "dev": true, + "requires": { + "@hapi/hoek": "^8.3.0" + } + }, "@kwsites/file-exists": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/@kwsites/file-exists/-/file-exists-1.1.1.tgz", @@ -2749,6 +2788,16 @@ "@types/responselike": "*" } }, + "@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "requires": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, "@types/http-cache-semantics": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz", @@ -2783,6 +2832,12 @@ "resolved": "https://registry.npmjs.org/@types/lru-cache/-/lru-cache-5.1.1.tgz", "integrity": "sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==" }, + "@types/minimatch": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-5.1.2.tgz", + "integrity": "sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA==", + "dev": true + }, "@types/node": { "version": "18.11.7", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.11.7.tgz", @@ -2978,6 +3033,35 @@ "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==", "dev": true }, + "asl-path-validator": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/asl-path-validator/-/asl-path-validator-0.11.0.tgz", + "integrity": "sha512-2kfFkqNCXInc7d8hbUoXn/XpK5fFr3//0nh4jfcZWav0VR4zo2bYVlRCwOuNKJID9yM4vIo7dMb4n0fnWrc/Xw==", + "dev": true, + "requires": { + "jsonpath-plus": "^7.0.0" + }
+ }, + "asl-validator": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/asl-validator/-/asl-validator-3.5.0.tgz", + "integrity": "sha512-DyIw6MwrePKutwxizPOxgaJhRT8klcWGNEC0fRpz7HtNMDysRBTCetGgjBR41CjXASd7ldvcIuzRA6CS4dbMxg==", + "dev": true, + "requires": { + "ajv": "^8.11.0", + "asl-path-validator": "^0.11.0", + "commander": "^5.1.0", + "jsonpath-plus": "^7.0.0" + }, + "dependencies": { + "commander": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", + "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", + "dev": true + } + } + }, "ast-types": { "version": "0.13.4", "resolved": "https://registry.npmjs.org/ast-types/-/ast-types-0.13.4.tgz", @@ -3448,6 +3532,16 @@ "simple-swizzle": "^0.2.2" } }, + "colorful": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/colorful/-/colorful-2.1.0.tgz", + "integrity": "sha512-DpDLDvi/vPzqoPX7Dw44ZZf004DCdEcCx1pf5hq5aipVHXjwgRSYGCz3m17rA2XCduW91wJUapge8/3qLvjYcg==" + }, + "colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==" + }, "colorspace": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.4.tgz", @@ -3568,6 +3662,11 @@ "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-3.0.1.tgz", "integrity": "sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og==" }, + "dateformat": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.5.1.tgz", + "integrity": "sha512-OD0TZ+B7yP7ZgpJf5K2DIbj3FZvFvxgFUuaqA/V5zTjAtAAXZ1E8bktHxmAGs4x5b7PflqA9LeQ84Og7wYtF7Q==" + }, "dayjs": { "version": "1.11.6", "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.6.tgz", @@ -5048,6 +5147,12 @@ "universalify": "^2.0.0" } }, + "jsonpath-plus": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-7.2.0.tgz", + "integrity": "sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==", + "dev": true + }, "jsonwebtoken": { "version": "8.5.1", "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", @@ -5469,8 +5574,7 @@ "mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" }, "ms": { "version": "2.1.3", @@ -6230,6 +6334,75 @@ } } }, + "serverless-plugin-typescript": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/serverless-plugin-typescript/-/serverless-plugin-typescript-2.1.4.tgz", + "integrity": "sha512-6+IHXlsDydwDu+3ZhJiWyaFsfAoHbXdFGk10RJjipFYW+KLIoGMAxazXeiq0YQtC7uJYOtfYtGM1PtNjxOXAJg==", + "dev": true, + "requires": { + "fs-extra": "^7.0.1", + "globby": "^10.0.2", + "lodash": "^4.17.21" + }, + "dependencies": { + "fs-extra": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-7.0.1.tgz", + "integrity": "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "jsonfile": "^4.0.0", + "universalify": 
"^0.1.0" + } + }, + "globby": { + "version": "10.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-10.0.2.tgz", + "integrity": "sha512-7dUi7RvCoT/xast/o/dLN53oqND4yk0nsHkhRgn9w65C4PofCLOoJ39iSOg+qVDdWQPIEj+eszMHQ+aLVwwQSg==", + "dev": true, + "requires": { + "@types/glob": "^7.1.1", + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.0.3", + "glob": "^7.1.3", + "ignore": "^5.1.1", + "merge2": "^1.2.3", + "slash": "^3.0.0" + } + }, + "jsonfile": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", + "integrity": "sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.6" + } + }, + "universalify": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.1.2.tgz", + "integrity": "sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==", + "dev": true + } + } + }, + "serverless-step-functions": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/serverless-step-functions/-/serverless-step-functions-3.13.0.tgz", + "integrity": "sha512-lJOedjdKShJW3bemwhvTUAMqKu/uWJYFNKEtBxgOsw/BuWxCFvL6esKY+WA3QR7jeHtBknI5U/SStI4j3a+x+w==", + "dev": true, + "requires": { + "@hapi/joi": "^15.0.2", + "@serverless/utils": "^6.7.0", + "asl-validator": "^3.1.0", + "bluebird": "^3.4.0", + "chalk": "^4.1.2", + "lodash": "^4.17.11" + } + }, "setimmediate": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz", @@ -6578,6 +6751,11 @@ "next-tick": "1" } }, + "tinytim": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/tinytim/-/tinytim-0.1.1.tgz", + "integrity": "sha512-NIpsp9lBIxPNzB++HnMmUd4byzJSVbbO4F+As1Gb1IG/YQT5QvmBDjpx8SpDS8fhGC+t+Qw8ldQgbcAIaU+2cA==" + }, "tmp": { "version": "0.0.33", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", @@ -6630,6 +6808,17 @@ "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, + "tracer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/tracer/-/tracer-1.1.6.tgz", + "integrity": "sha512-VKEIQRNgzSgti18whs+8l7e2y/gWcklw+C/xZtFH/AGvaN6GDlvhkQTFEsy448Gxb5MtbNbzJiG0L1TJEQnqcA==", + "requires": { + "colors": "1.4.0", + "dateformat": "4.5.1", + "mkdirp": "^1.0.4", + "tinytim": "0.1.1" + } + }, "traverse": { "version": "0.6.7", "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.6.7.tgz", @@ -6791,6 +6980,22 @@ "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, + "utilx": { + "version": "0.0.5", + "resolved": "https://registry.npmjs.org/utilx/-/utilx-0.0.5.tgz", + "integrity": "sha512-lsNsH9TmfMMOPzoqM/Sai5DU4PwWDfHHUjEhGqQ+SB+Zngn+x3+UICj08QhZfc59rlykcD0lAShtgrtoGANsMQ==", + "requires": { + "colorful": "2.1.0", + "iconv-lite": "0.2.11" + }, + "dependencies": { + "iconv-lite": { + "version": "0.2.11", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.2.11.tgz", + "integrity": "sha512-KhmFWgaQZY83Cbhi+ADInoUQ8Etn6BG5fikM9syeOjQltvR45h7cRKJ/9uvQEuD61I3Uju77yYce0/LhKVClQw==" + } + } + }, "uuid": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", @@ -6805,6 +7010,24 @@ "builtins": "^1.0.3" } }, + "velocity": { 
+ "version": "0.7.3", + "resolved": "https://registry.npmjs.org/velocity/-/velocity-0.7.3.tgz", + "integrity": "sha512-x3BlBWsdWr2L/73YKsx0nw3Y40ayZs+GoFP1jRBRU/cowph8LSLvqPmXyt3E604WfNr9xf485KgjHIWFZQ5Hug==", + "requires": { + "colorful": "~2.1.0", + "commander": "~2.3.0", + "tracer": "~1.1.4", + "utilx": "0.0.5" + }, + "dependencies": { + "commander": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.3.0.tgz", + "integrity": "sha512-CD452fnk0jQyk3NfnK+KkR/hUPoHt5pVaKHogtyyv3N0U4QfAal9W0/rXLOg/vVZgQKa7jdtXypKs1YAip11uQ==" + } + } + }, "vm2": { "version": "3.9.12", "resolved": "https://registry.npmjs.org/vm2/-/vm2-3.9.12.tgz", diff --git a/microservices/package.json b/microservices/package.json index 746fe6182..824973f9a 100644 --- a/microservices/package.json +++ b/microservices/package.json @@ -4,7 +4,10 @@ "description": "", "main": "index.js", "scripts": { - "test": "echo \"Error: no test specified\" && exit 1" + "deploy:dev": "source ../.env && sls deploy", + "deploy:staging": "source ../.env && sls deploy --stage staging", + "deploy:prod": "source ../.env && sls deploy --stage prod", + "destroy:dev": "source ../.env && sls remove" }, "author": "", "license": "ISC", @@ -14,12 +17,15 @@ "@octokit/rest": "^19.0.5", "aws-sdk": "^2.1241.0", "octokit": "^2.0.10", + "velocity": "^0.7.3", "winston": "^3.8.2", "winston-cloudwatch": "^6.1.1" }, "devDependencies": { "@types/aws-lambda": "^8.10.108", "aws-lambda": "^1.0.7", - "serverless": "^3.23.0" + "serverless": "^3.23.0", + "serverless-plugin-typescript": "^2.1.4", + "serverless-step-functions": "^3.13.0" } } diff --git a/microservices/serverless.yml b/microservices/serverless.yml index 30cfab593..4a7de39a1 100644 --- a/microservices/serverless.yml +++ b/microservices/serverless.yml @@ -3,12 +3,8 @@ useDotenv: true # to allow reading of .env file frameworkVersion: "3" plugins: - # serverless-plugin-typescript needs to precede serverless-offline - serverless-plugin-typescript - serverless-step-functions - - serverless-step-functions-local - - serverless-offline-lambda - - serverless-offline provider: name: aws @@ -19,16 +15,6 @@ provider: OUTGOING_QUEUE_URL: ${env:OUTGOING_QUEUE_URL} NODE_ENV: ${env:NODE_ENV} -custom: - stepFunctionsLocal: - accountId: ${env:AWS_ACCOUNT_NUMBER} - region: ${env:AWS_REGION} - lambdaEndpoint: http://localhost:3002 - eventBridgeEvents: - enabled: true - endpoint: http://localhost:4010 - sqsUrl: ${env:INCOMING_QUEUE_URL} - functions: generalDomainValidation: handler: handler.generalDomainValidation @@ -44,7 +30,11 @@ functions: handler: handler.stepFunctionsTrigger events: - sqs: arn:aws:sqs:${aws:region}:${aws:accountId}:outgoingQueueStaging + +custom: + stateMachineName: SiteLaunchStepFunctions-${opt:stage, 'dev'} stepFunctions: stateMachines: siteLaunch: + name: ${self:custom.stateMachineName} definition: ${file(./site-launch/step-function-workflows/site-launch.asl.yaml)} diff --git a/package-lock.json b/package-lock.json index 2bf90025f..92ed900d5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "isomercms", - "version": "0.17.0", + "version": "0.18.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -62,31 +62,14 @@ "@aws-sdk/types": "^3.222.0", "@aws-sdk/util-utf8-browser": "^3.0.0", "tslib": "^1.11.1" - }, - "dependencies": { - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": 
"sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - }, - "dependencies": { - "tslib": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", - "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" - } - } - } } }, "@aws-sdk/abort-controller": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.290.0.tgz", - "integrity": "sha512-Q4AqucQnhcsauH6tDf1bSRuOW/Ejwjs1qHPLlvknwX1IoxZettP3lXz9LLd8KZnEMFQLHPmBTbFIW+Ivpzl+vw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.292.0.tgz", + "integrity": "sha512-lf+OPptL01kvryIJy7+dvFux5KbJ6OTwLPPEekVKZ2AfEvwcVtOZWFUhyw3PJCBTVncjKB1Kjl3V/eTS3YuPXQ==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -98,1333 +81,142 @@ } }, "@aws-sdk/client-amplify": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-amplify/-/client-amplify-3.290.0.tgz", - "integrity": "sha512-IzcSuSmMNNBNCKqUAncm+gSIGPuZbwUWQrzZ77wpq2mqGsaHfb+FQP7TtHcXBK2L4gCf4FZQcqNAM1cfhnOARA==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-amplify/-/client-amplify-3.293.0.tgz", + "integrity": "sha512-c0SBEEHkYnrF7uFQU7WGUKe2La29rVjPtL8y5CZBGGjzY+8NbjzGDHfRPGmFYkhyJcoKOU3nN/lrtx+lUyZ9Jg==", "requires": { "@aws-crypto/sha256-browser": "3.0.0", "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/client-sts": "3.290.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/client-sts": "3.293.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/credential-provider-node": "3.293.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + 
"@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-signing": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-crypto/ie11-detection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz", - "integrity": "sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-browser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz", - "integrity": "sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==", - "requires": { - "@aws-crypto/ie11-detection": "^3.0.0", - "@aws-crypto/sha256-js": "^3.0.0", - "@aws-crypto/supports-web-crypto": "^3.0.0", - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-locate-window": "^3.0.0", - "@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-js": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz", - "integrity": "sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==", - "requires": { - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/supports-web-crypto": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz", - "integrity": "sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/util": { - "version": "3.0.0", - "resolved": 
"https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz", - "integrity": "sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==", - "requires": { - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-sdk/abort-controller": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.290.0.tgz", - "integrity": "sha512-Q4AqucQnhcsauH6tDf1bSRuOW/Ejwjs1qHPLlvknwX1IoxZettP3lXz9LLd8KZnEMFQLHPmBTbFIW+Ivpzl+vw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/client-sso": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.290.0.tgz", - "integrity": "sha512-FUFAbptuJSRKnzBgFJqXxusSG7PzECSqX0FnMh2vxCVu2PifaAE4stiMW8Myj8ABQAbfIrAWM+17upcrfmudoA==", - "requires": { - "@aws-crypto/sha256-browser": "3.0.0", - "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/client-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.290.0.tgz", - "integrity": "sha512-E2X/7tZLziKLgi/owYoUL5gcorGJrbM2tANJdJmaqVUPhPvoY4wU8P91pGPKon9nQj0RQexre5ClZawYD6lTzA==", - "requires": { - "@aws-crypto/sha256-browser": "3.0.0", - "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-sdk-sts": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - 
"@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", - "fast-xml-parser": "4.1.2", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/config-resolver": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.290.0.tgz", - "integrity": "sha512-Ovskri6IR4iBK0+3ttgjPSgOUEC+fd5tqRN5JlPCCZ9VwqwF/z26yYC4fAPaMUAJwPVRFeYYzQoszXGoxPyG7g==", - "requires": { - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-env": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.290.0.tgz", - "integrity": "sha512-gWsllElBm4DWZcc42Zb6sxaw77KBf6cY9iEezbVzVbJioqR9hIr1Pq3Nx30z1Q+1KiHSnt/Wl9cYYHOoNw2DnQ==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-imds": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.290.0.tgz", - "integrity": "sha512-PkYEs7zzUVWnhkR9TlU1ORDcCnkD7qoqR1loXXSZc+EIOX9M7f+sXGLtCXVl9wV1Ekx3a5Tjud+aQcOJjjFePA==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-ini": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.290.0.tgz", - "integrity": "sha512-n3OGvkvNgMS6Kb2fuFrmNeCI8CP7DGOsEvcfYPMiXsQWx9hHAh/XIv7ksD3TL5Mn8Dr0NHmB6uY5WgUZDatqfw==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - "@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.290.0.tgz", - "integrity": "sha512-snLmeD7yAYq1x7lngCTM1VGmHYCZ4iUW5JRG9XPr7Npl7VWVdnNqaf5XBYEANgaFoWxjN3dNyDPg05+5Ew6QCA==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-ini": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - "@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - 
"@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-process": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.290.0.tgz", - "integrity": "sha512-PNnWDYSaE8dMepH59cyrXs45Ucdmzdnyuhcn/fVwQ0Nc7FzESxw1G7SgJZhYF4tMRDiepu6lbFEN0QXsTIM8Iw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-sso": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.290.0.tgz", - "integrity": "sha512-tX5Ez3EiMrXDx6Vsn2gMq7ga3y4iyPneenCNToRUlmZrhF61DhMfA22gRwdwuP8hlFKXY4LRg51pBfJeq0ga8w==", - "requires": { - "@aws-sdk/client-sso": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/token-providers": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-web-identity": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.290.0.tgz", - "integrity": "sha512-Apv6AnYtb5LTUreDVsqlXFNgiU0TQAZ8sfPg23pGrBGZvZU3KfDhF9n5j0i9Uca44O+/vB7UvbbvNAZS200vsQ==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/fetch-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.290.0.tgz", - "integrity": "sha512-hehbIxcqyJeiUBTbbP3C4tmY2p9UIh7bnLTKhocqaUcdEXQwlIRiQlnnA+TrQ5Uyoe+W3fAmv25tq08rB9ddhw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/hash-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.290.0.tgz", - "integrity": "sha512-ayqJBOPoMa3H3eUhZHPu9ikNjoydu3nxj+R6tp8nMrKfFYDUu0XCdkpB0Wk/EBpMyWA2ZeyyfgXEUtQkqkAWBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-buffer-from": "3.208.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/invalid-dependency": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.290.0.tgz", - "integrity": "sha512-plJpEJ+PPTrpaMfg5KKsAfdXUi6iUZTc/PgP0/CPqCe3kuiWb1xb2GeTxOL5InzfBffVdHWeTanYu9+V0MIxVw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-content-length": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.290.0.tgz", - "integrity": "sha512-9I+vnGSe/S0U98ZRCbOAdQngYfO7kYvXb5gjjX08XUQDfbB+ooIM1VdKngHhzUCTAs48z/43PzpBCjGJvGjB9w==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-host-header": { 
- "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.290.0.tgz", - "integrity": "sha512-j1ss8pjSJyG0aB+X0VPYgTfoieB8m5c+PrWw85JRM/qgbQeurkyD3d/F00V1hkZI42gygOaPlmYMik3kQnmITw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-logger": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.290.0.tgz", - "integrity": "sha512-wJOK31t/Y/Km6B5ULF/k2RmQB/6MXSN/hMuCiYsLMapFT+86mBlY8cXytYXtLS8afRKpuNy29EY+O6ovfjz6Ig==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-retry": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.290.0.tgz", - "integrity": "sha512-mvXvYd/3L/f5ZcnFI1Q2hwk0OtzKMmkDfWW1BcoVzK0XHf2aeehbs7xgI92ICEi/5Ali0IG5krv5LqfgO154Sw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/service-error-classification": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "tslib": "^2.3.1", - "uuid": "^8.3.2" - } - }, - "@aws-sdk/middleware-sdk-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.290.0.tgz", - "integrity": "sha512-NaYnDhFtmz/e9jNBNeY10A4AldCvjF46ZfeIWoBUsk/4qDlSP9kaCjTufEjNf/zMTtYzGiP/FUtLS7T6tfXdoQ==", - "requires": { - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-serde": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.290.0.tgz", - "integrity": "sha512-lZCKlfJzosi3cVx02RKRTVvbAijHTfd16EiSyKRsQOF2rCu7Qt4LzygIlqUonCeHG6eSqOMMf7LAJ22IHafBbw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-signing": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.290.0.tgz", - "integrity": "sha512-mEJZQrbXkOTI+BdFlpAd1CleVJL8B7qayANMNj9nrZqvZ7HzVDLEkNaJqFz9JFizYTfZC2ZjtATPrSiYDvFEfg==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-stack": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.290.0.tgz", - "integrity": "sha512-25iC/7oAokRfxixGkDjBSIAkNwtx2kcO+xMoDczFus9UrlOr2pBY0IXbPn6bB56q2zwsBTHcmMTn0H7FJSIQmw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-user-agent": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.290.0.tgz", - "integrity": "sha512-ZR49PPra3LtqZBmXAtV8YrUSrkVG0hPBICE8cma/wMwbKGHa0G+Xu4pOZP0oQXs5XeGu1cs/Nx3AOJ2fgaMjhQ==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-config-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.290.0.tgz", - "integrity": 
"sha512-dQLnyCy5iT7Q5Ot2JOciNH9WkaixWwmEnvW6nBa6febzAYZVy78sfJOOP1EZ7ClG1aIhrsAN7/7wPebpn/Peiw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.290.0.tgz", - "integrity": "sha512-HfzuzdpAJpO/ob9DQ3aEB/WmPCS5vZOic9T4TtSCmRd5e3+xdMtK/MQUizp8XkbUGBat7jPmcV13Gy4YmwfAuw==", - "requires": { - "@aws-sdk/abort-controller": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-builder": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.290.0.tgz", - "integrity": "sha512-7q8x8ux1RCUxUolqxsXfSbCObyMzvSwfJb9GgZ8rDi2U61l8W760a9ejHzizfQJvdldRSwFqmynkRAqYbvKixg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/service-error-classification": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.290.0.tgz", - "integrity": "sha512-QP+QgL5Gm6RKl4KGwTRyG1kw0SxBbcmp/a/yhywVHmRI0/+4VsL+cooTqtjFr3xVmKoCX+/JZZ8P96VGFvRSZA==" - }, - "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/smithy-client": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.290.0.tgz", - 
"integrity": "sha512-MDa+BJqM1FP2HYugVAscufoLJuapEdUTZPoyERVGfUEznKfKH33QXRoeqW1wzUNyhcxFONHLnXp1aYFBtnLx7g==", - "requires": { - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", - "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-browser": { - "version": "3.188.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.188.0.tgz", - "integrity": "sha512-8VpnwFWXhnZ/iRSl9mTf+VKOX9wDE8QtN4bj9pBfxwf90H1X7E8T6NkiZD3k+HubYf2J94e7DbeHs7fuCPW5Qg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-node": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.208.0.tgz", - "integrity": "sha512-3zj50e5g7t/MQf53SsuuSf0hEELzMtD8RX8C76f12OSRo2Bca4FLLYHe0TZbxcfQHom8/hOaeZEyTyMogMglqg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-user-agent-browser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.290.0.tgz", - "integrity": "sha512-I+B5ooKRYQ9jHcdg7TOf20LlTfcBUlCJQ2AAqI1ukmJqal22OD1CtC1E+/XbplpU5mxRs4s2UQbxNaPA0yIrBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "bowser": "^2.11.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-user-agent-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.290.0.tgz", - "integrity": "sha512-7juKgEMqpa0il6jZmiBKGDJslM4UIKX1bvhlqkSvvPfV3zFdfi0V2xavh68GfelWduBBkYLGRjsLunqzw64f8A==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "fast-xml-parser": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.1.2.tgz", - "integrity": 
"sha512-CDYeykkle1LiA/uqQyNwYpFbyF6Axec6YapmpUP+/RHWIoR1zKjocdvNaTsxCxZzQ6v9MLXaSYm9Qq0thv0DHg==", - "requires": { - "strnum": "^1.0.5" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } - } - }, - "@aws-sdk/client-cloudwatch-logs": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.290.0.tgz", - "integrity": "sha512-bCnmhbiLFjlZjDZm9zdslEFZRB8dEfJb2NFPBjT//mC8n2sawGkkyL3yx/5onOQbalRa3maYLLpk7N8Nf99f6w==", - "requires": { - "@aws-crypto/sha256-browser": "3.0.0", - "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/client-sts": "3.290.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - }, - "dependencies": { - "@aws-crypto/ie11-detection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz", - "integrity": "sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-browser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz", - "integrity": "sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==", - "requires": { - "@aws-crypto/ie11-detection": "^3.0.0", - "@aws-crypto/sha256-js": "^3.0.0", - "@aws-crypto/supports-web-crypto": "^3.0.0", - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-locate-window": "^3.0.0", - "@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - }, - 
"dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-js": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz", - "integrity": "sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==", - "requires": { - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/supports-web-crypto": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz", - "integrity": "sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-sdk/abort-controller": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.290.0.tgz", - "integrity": "sha512-Q4AqucQnhcsauH6tDf1bSRuOW/Ejwjs1qHPLlvknwX1IoxZettP3lXz9LLd8KZnEMFQLHPmBTbFIW+Ivpzl+vw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/client-sso": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.290.0.tgz", - "integrity": "sha512-FUFAbptuJSRKnzBgFJqXxusSG7PzECSqX0FnMh2vxCVu2PifaAE4stiMW8Myj8ABQAbfIrAWM+17upcrfmudoA==", - "requires": { - "@aws-crypto/sha256-browser": "3.0.0", - "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/client-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.290.0.tgz", 
- "integrity": "sha512-E2X/7tZLziKLgi/owYoUL5gcorGJrbM2tANJdJmaqVUPhPvoY4wU8P91pGPKon9nQj0RQexre5ClZawYD6lTzA==", - "requires": { - "@aws-crypto/sha256-browser": "3.0.0", - "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-sdk-sts": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", - "fast-xml-parser": "4.1.2", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/config-resolver": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.290.0.tgz", - "integrity": "sha512-Ovskri6IR4iBK0+3ttgjPSgOUEC+fd5tqRN5JlPCCZ9VwqwF/z26yYC4fAPaMUAJwPVRFeYYzQoszXGoxPyG7g==", - "requires": { - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-env": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.290.0.tgz", - "integrity": "sha512-gWsllElBm4DWZcc42Zb6sxaw77KBf6cY9iEezbVzVbJioqR9hIr1Pq3Nx30z1Q+1KiHSnt/Wl9cYYHOoNw2DnQ==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-imds": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.290.0.tgz", - "integrity": "sha512-PkYEs7zzUVWnhkR9TlU1ORDcCnkD7qoqR1loXXSZc+EIOX9M7f+sXGLtCXVl9wV1Ekx3a5Tjud+aQcOJjjFePA==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-ini": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.290.0.tgz", - "integrity": "sha512-n3OGvkvNgMS6Kb2fuFrmNeCI8CP7DGOsEvcfYPMiXsQWx9hHAh/XIv7ksD3TL5Mn8Dr0NHmB6uY5WgUZDatqfw==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - 
"@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.290.0.tgz", - "integrity": "sha512-snLmeD7yAYq1x7lngCTM1VGmHYCZ4iUW5JRG9XPr7Npl7VWVdnNqaf5XBYEANgaFoWxjN3dNyDPg05+5Ew6QCA==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-ini": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - "@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-process": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.290.0.tgz", - "integrity": "sha512-PNnWDYSaE8dMepH59cyrXs45Ucdmzdnyuhcn/fVwQ0Nc7FzESxw1G7SgJZhYF4tMRDiepu6lbFEN0QXsTIM8Iw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-sso": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.290.0.tgz", - "integrity": "sha512-tX5Ez3EiMrXDx6Vsn2gMq7ga3y4iyPneenCNToRUlmZrhF61DhMfA22gRwdwuP8hlFKXY4LRg51pBfJeq0ga8w==", - "requires": { - "@aws-sdk/client-sso": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/token-providers": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-web-identity": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.290.0.tgz", - "integrity": "sha512-Apv6AnYtb5LTUreDVsqlXFNgiU0TQAZ8sfPg23pGrBGZvZU3KfDhF9n5j0i9Uca44O+/vB7UvbbvNAZS200vsQ==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/fetch-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.290.0.tgz", - "integrity": "sha512-hehbIxcqyJeiUBTbbP3C4tmY2p9UIh7bnLTKhocqaUcdEXQwlIRiQlnnA+TrQ5Uyoe+W3fAmv25tq08rB9ddhw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/hash-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.290.0.tgz", - "integrity": "sha512-ayqJBOPoMa3H3eUhZHPu9ikNjoydu3nxj+R6tp8nMrKfFYDUu0XCdkpB0Wk/EBpMyWA2ZeyyfgXEUtQkqkAWBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-buffer-from": "3.208.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/invalid-dependency": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.290.0.tgz", - "integrity": 
"sha512-plJpEJ+PPTrpaMfg5KKsAfdXUi6iUZTc/PgP0/CPqCe3kuiWb1xb2GeTxOL5InzfBffVdHWeTanYu9+V0MIxVw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-content-length": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.290.0.tgz", - "integrity": "sha512-9I+vnGSe/S0U98ZRCbOAdQngYfO7kYvXb5gjjX08XUQDfbB+ooIM1VdKngHhzUCTAs48z/43PzpBCjGJvGjB9w==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-host-header": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.290.0.tgz", - "integrity": "sha512-j1ss8pjSJyG0aB+X0VPYgTfoieB8m5c+PrWw85JRM/qgbQeurkyD3d/F00V1hkZI42gygOaPlmYMik3kQnmITw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-logger": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.290.0.tgz", - "integrity": "sha512-wJOK31t/Y/Km6B5ULF/k2RmQB/6MXSN/hMuCiYsLMapFT+86mBlY8cXytYXtLS8afRKpuNy29EY+O6ovfjz6Ig==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-retry": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.290.0.tgz", - "integrity": "sha512-mvXvYd/3L/f5ZcnFI1Q2hwk0OtzKMmkDfWW1BcoVzK0XHf2aeehbs7xgI92ICEi/5Ali0IG5krv5LqfgO154Sw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/service-error-classification": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "tslib": "^2.3.1", - "uuid": "^8.3.2" - } - }, - "@aws-sdk/middleware-sdk-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.290.0.tgz", - "integrity": "sha512-NaYnDhFtmz/e9jNBNeY10A4AldCvjF46ZfeIWoBUsk/4qDlSP9kaCjTufEjNf/zMTtYzGiP/FUtLS7T6tfXdoQ==", - "requires": { - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-serde": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.290.0.tgz", - "integrity": "sha512-lZCKlfJzosi3cVx02RKRTVvbAijHTfd16EiSyKRsQOF2rCu7Qt4LzygIlqUonCeHG6eSqOMMf7LAJ22IHafBbw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-signing": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.290.0.tgz", - "integrity": "sha512-mEJZQrbXkOTI+BdFlpAd1CleVJL8B7qayANMNj9nrZqvZ7HzVDLEkNaJqFz9JFizYTfZC2ZjtATPrSiYDvFEfg==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - 
"tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-stack": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.290.0.tgz", - "integrity": "sha512-25iC/7oAokRfxixGkDjBSIAkNwtx2kcO+xMoDczFus9UrlOr2pBY0IXbPn6bB56q2zwsBTHcmMTn0H7FJSIQmw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-user-agent": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.290.0.tgz", - "integrity": "sha512-ZR49PPra3LtqZBmXAtV8YrUSrkVG0hPBICE8cma/wMwbKGHa0G+Xu4pOZP0oQXs5XeGu1cs/Nx3AOJ2fgaMjhQ==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-config-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.290.0.tgz", - "integrity": "sha512-dQLnyCy5iT7Q5Ot2JOciNH9WkaixWwmEnvW6nBa6febzAYZVy78sfJOOP1EZ7ClG1aIhrsAN7/7wPebpn/Peiw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.290.0.tgz", - "integrity": "sha512-HfzuzdpAJpO/ob9DQ3aEB/WmPCS5vZOic9T4TtSCmRd5e3+xdMtK/MQUizp8XkbUGBat7jPmcV13Gy4YmwfAuw==", - "requires": { - "@aws-sdk/abort-controller": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-builder": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.290.0.tgz", - "integrity": "sha512-7q8x8ux1RCUxUolqxsXfSbCObyMzvSwfJb9GgZ8rDi2U61l8W760a9ejHzizfQJvdldRSwFqmynkRAqYbvKixg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/service-error-classification": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.290.0.tgz", - "integrity": "sha512-QP+QgL5Gm6RKl4KGwTRyG1kw0SxBbcmp/a/yhywVHmRI0/+4VsL+cooTqtjFr3xVmKoCX+/JZZ8P96VGFvRSZA==" - }, - "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/smithy-client": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.290.0.tgz", - "integrity": "sha512-MDa+BJqM1FP2HYugVAscufoLJuapEdUTZPoyERVGfUEznKfKH33QXRoeqW1wzUNyhcxFONHLnXp1aYFBtnLx7g==", - "requires": { - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", - "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-browser": { - "version": "3.188.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.188.0.tgz", - "integrity": "sha512-8VpnwFWXhnZ/iRSl9mTf+VKOX9wDE8QtN4bj9pBfxwf90H1X7E8T6NkiZD3k+HubYf2J94e7DbeHs7fuCPW5Qg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-node": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.208.0.tgz", - "integrity": "sha512-3zj50e5g7t/MQf53SsuuSf0hEELzMtD8RX8C76f12OSRo2Bca4FLLYHe0TZbxcfQHom8/hOaeZEyTyMogMglqg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", - "requires": { - "tslib": "^2.3.1" - } - }, - 
"@aws-sdk/util-user-agent-browser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.290.0.tgz", - "integrity": "sha512-I+B5ooKRYQ9jHcdg7TOf20LlTfcBUlCJQ2AAqI1ukmJqal22OD1CtC1E+/XbplpU5mxRs4s2UQbxNaPA0yIrBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "bowser": "^2.11.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-user-agent-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.290.0.tgz", - "integrity": "sha512-7juKgEMqpa0il6jZmiBKGDJslM4UIKX1bvhlqkSvvPfV3zFdfi0V2xavh68GfelWduBBkYLGRjsLunqzw64f8A==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "fast-xml-parser": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.1.2.tgz", - "integrity": "sha512-CDYeykkle1LiA/uqQyNwYpFbyF6Axec6YapmpUP+/RHWIoR1zKjocdvNaTsxCxZzQ6v9MLXaSYm9Qq0thv0DHg==", - "requires": { - "strnum": "^1.0.5" - } - }, + } + }, + "@aws-sdk/client-cloudwatch-logs": { + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-cloudwatch-logs/-/client-cloudwatch-logs-3.293.0.tgz", + "integrity": "sha512-sWOkurEYU6u2/PM7aV22hfnbCz4TbM+mdIz9VJ/sZ0hiR2xN0iaGXGYIQ+ZXgQt91mS5VhqpXUkffCP/grM0Aw==", + "requires": { + "@aws-crypto/sha256-browser": "3.0.0", + "@aws-crypto/sha256-js": "3.0.0", + "@aws-sdk/client-sts": "3.293.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/credential-provider-node": "3.293.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-signing": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", + "tslib": "^2.3.1" + }, + "dependencies": { "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } } }, "@aws-sdk/client-secrets-manager": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-secrets-manager/-/client-secrets-manager-3.290.0.tgz", - "integrity": 
"sha512-CI87rdygImLmMcOQ/goRN2l0ABKaz/XCgEPag4p+7W9/0/eGK4B9WblWNhC2oRFa82nwV8+Scan5oLuzcI2oHQ==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-secrets-manager/-/client-secrets-manager-3.293.0.tgz", + "integrity": "sha512-xSRtwuUO3jVumBTuCuXR25+veFPlne3xUzl9+v5Km1IR/p53bgN6r/0nPelBHG2LKS/nmWXqhXsIRCCVDUmztg==", "requires": { "@aws-crypto/sha256-browser": "3.0.0", "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/client-sts": "3.290.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/client-sts": "3.293.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/credential-provider-node": "3.293.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-signing": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1", "uuid": "^8.3.2" }, @@ -1442,41 +234,41 @@ } }, "@aws-sdk/client-sso": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.290.0.tgz", - "integrity": "sha512-FUFAbptuJSRKnzBgFJqXxusSG7PzECSqX0FnMh2vxCVu2PifaAE4stiMW8Myj8ABQAbfIrAWM+17upcrfmudoA==", + "version": "3.293.0", + 
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.293.0.tgz", + "integrity": "sha512-EtVgEqL4vSDAV6vi9QzeZA5M+CIQIPoy14Q6Gl7TWehakxBqGFw2xnEHBo2djWH5oJMQAGOfjICPkZLoKxJT1A==", "requires": { "@aws-crypto/sha256-browser": "3.0.0", "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -1488,489 +280,90 @@ } }, "@aws-sdk/client-sso-oidc": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.290.0.tgz", - "integrity": "sha512-/+OSYCjyf2TjA57beWLBjG05yPwWlpqK4gO3GwpVqfygaRh6g5jS0CBVQs+z+xc7gmI0weC/nhc+BXR9qcJJAA==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.293.0.tgz", + "integrity": "sha512-GrbcBzRxWNRc5unZ0rOe1Jzhjvf7xIiCfLDhXYKaafb38gxUc3vDPy4Uzih6Trcq525oB0fG7iiZJgstMXelcw==", "requires": { "@aws-crypto/sha256-browser": "3.0.0", "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - 
"@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-crypto/ie11-detection": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz", - "integrity": "sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-browser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz", - "integrity": "sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==", - "requires": { - "@aws-crypto/ie11-detection": "^3.0.0", - "@aws-crypto/sha256-js": "^3.0.0", - "@aws-crypto/supports-web-crypto": "^3.0.0", - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-locate-window": "^3.0.0", - 
"@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/sha256-js": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz", - "integrity": "sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==", - "requires": { - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/supports-web-crypto": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz", - "integrity": "sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==", - "requires": { - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-crypto/util": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz", - "integrity": "sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==", - "requires": { - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, - "@aws-sdk/abort-controller": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/abort-controller/-/abort-controller-3.290.0.tgz", - "integrity": "sha512-Q4AqucQnhcsauH6tDf1bSRuOW/Ejwjs1qHPLlvknwX1IoxZettP3lXz9LLd8KZnEMFQLHPmBTbFIW+Ivpzl+vw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/config-resolver": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.290.0.tgz", - "integrity": "sha512-Ovskri6IR4iBK0+3ttgjPSgOUEC+fd5tqRN5JlPCCZ9VwqwF/z26yYC4fAPaMUAJwPVRFeYYzQoszXGoxPyG7g==", - "requires": { - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/fetch-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.290.0.tgz", - "integrity": "sha512-hehbIxcqyJeiUBTbbP3C4tmY2p9UIh7bnLTKhocqaUcdEXQwlIRiQlnnA+TrQ5Uyoe+W3fAmv25tq08rB9ddhw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/hash-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.290.0.tgz", - "integrity": 
"sha512-ayqJBOPoMa3H3eUhZHPu9ikNjoydu3nxj+R6tp8nMrKfFYDUu0XCdkpB0Wk/EBpMyWA2ZeyyfgXEUtQkqkAWBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-buffer-from": "3.208.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/invalid-dependency": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.290.0.tgz", - "integrity": "sha512-plJpEJ+PPTrpaMfg5KKsAfdXUi6iUZTc/PgP0/CPqCe3kuiWb1xb2GeTxOL5InzfBffVdHWeTanYu9+V0MIxVw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-content-length": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.290.0.tgz", - "integrity": "sha512-9I+vnGSe/S0U98ZRCbOAdQngYfO7kYvXb5gjjX08XUQDfbB+ooIM1VdKngHhzUCTAs48z/43PzpBCjGJvGjB9w==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-host-header": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.290.0.tgz", - "integrity": "sha512-j1ss8pjSJyG0aB+X0VPYgTfoieB8m5c+PrWw85JRM/qgbQeurkyD3d/F00V1hkZI42gygOaPlmYMik3kQnmITw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-logger": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.290.0.tgz", - "integrity": "sha512-wJOK31t/Y/Km6B5ULF/k2RmQB/6MXSN/hMuCiYsLMapFT+86mBlY8cXytYXtLS8afRKpuNy29EY+O6ovfjz6Ig==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-retry": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.290.0.tgz", - "integrity": "sha512-mvXvYd/3L/f5ZcnFI1Q2hwk0OtzKMmkDfWW1BcoVzK0XHf2aeehbs7xgI92ICEi/5Ali0IG5krv5LqfgO154Sw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/service-error-classification": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "tslib": "^2.3.1", - "uuid": "^8.3.2" - } - }, - "@aws-sdk/middleware-serde": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.290.0.tgz", - "integrity": "sha512-lZCKlfJzosi3cVx02RKRTVvbAijHTfd16EiSyKRsQOF2rCu7Qt4LzygIlqUonCeHG6eSqOMMf7LAJ22IHafBbw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-stack": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.290.0.tgz", - "integrity": "sha512-25iC/7oAokRfxixGkDjBSIAkNwtx2kcO+xMoDczFus9UrlOr2pBY0IXbPn6bB56q2zwsBTHcmMTn0H7FJSIQmw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-user-agent": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.290.0.tgz", - "integrity": 
"sha512-ZR49PPra3LtqZBmXAtV8YrUSrkVG0hPBICE8cma/wMwbKGHa0G+Xu4pOZP0oQXs5XeGu1cs/Nx3AOJ2fgaMjhQ==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-config-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.290.0.tgz", - "integrity": "sha512-dQLnyCy5iT7Q5Ot2JOciNH9WkaixWwmEnvW6nBa6febzAYZVy78sfJOOP1EZ7ClG1aIhrsAN7/7wPebpn/Peiw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.290.0.tgz", - "integrity": "sha512-HfzuzdpAJpO/ob9DQ3aEB/WmPCS5vZOic9T4TtSCmRd5e3+xdMtK/MQUizp8XkbUGBat7jPmcV13Gy4YmwfAuw==", - "requires": { - "@aws-sdk/abort-controller": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-builder": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.290.0.tgz", - "integrity": "sha512-7q8x8ux1RCUxUolqxsXfSbCObyMzvSwfJb9GgZ8rDi2U61l8W760a9ejHzizfQJvdldRSwFqmynkRAqYbvKixg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/service-error-classification": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.290.0.tgz", - "integrity": "sha512-QP+QgL5Gm6RKl4KGwTRyG1kw0SxBbcmp/a/yhywVHmRI0/+4VsL+cooTqtjFr3xVmKoCX+/JZZ8P96VGFvRSZA==" - }, - "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - 
"@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/smithy-client": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.290.0.tgz", - "integrity": "sha512-MDa+BJqM1FP2HYugVAscufoLJuapEdUTZPoyERVGfUEznKfKH33QXRoeqW1wzUNyhcxFONHLnXp1aYFBtnLx7g==", - "requires": { - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", - "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-browser": { - "version": "3.188.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.188.0.tgz", - "integrity": "sha512-8VpnwFWXhnZ/iRSl9mTf+VKOX9wDE8QtN4bj9pBfxwf90H1X7E8T6NkiZD3k+HubYf2J94e7DbeHs7fuCPW5Qg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-body-length-node": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.208.0.tgz", - "integrity": "sha512-3zj50e5g7t/MQf53SsuuSf0hEELzMtD8RX8C76f12OSRo2Bca4FLLYHe0TZbxcfQHom8/hOaeZEyTyMogMglqg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-user-agent-browser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.290.0.tgz", - "integrity": "sha512-I+B5ooKRYQ9jHcdg7TOf20LlTfcBUlCJQ2AAqI1ukmJqal22OD1CtC1E+/XbplpU5mxRs4s2UQbxNaPA0yIrBA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "bowser": "^2.11.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-user-agent-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.290.0.tgz", - "integrity": 
"sha512-7juKgEMqpa0il6jZmiBKGDJslM4UIKX1bvhlqkSvvPfV3zFdfi0V2xavh68GfelWduBBkYLGRjsLunqzw64f8A==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" } } }, "@aws-sdk/client-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.290.0.tgz", - "integrity": "sha512-E2X/7tZLziKLgi/owYoUL5gcorGJrbM2tANJdJmaqVUPhPvoY4wU8P91pGPKon9nQj0RQexre5ClZawYD6lTzA==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/client-sts/-/client-sts-3.293.0.tgz", + "integrity": "sha512-cNKWt9Xnv1sQvdLnzCdDJBRgavWH6g5F8TzrueaCq10cg/GanKkCgiIZFoKDv8LQ3dHzTkp/OKp4sN5N5DH/Ow==", "requires": { "@aws-crypto/sha256-browser": "3.0.0", "@aws-crypto/sha256-js": "3.0.0", - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-node": "3.290.0", - "@aws-sdk/fetch-http-handler": "3.290.0", - "@aws-sdk/hash-node": "3.290.0", - "@aws-sdk/invalid-dependency": "3.290.0", - "@aws-sdk/middleware-content-length": "3.290.0", - "@aws-sdk/middleware-endpoint": "3.290.0", - "@aws-sdk/middleware-host-header": "3.290.0", - "@aws-sdk/middleware-logger": "3.290.0", - "@aws-sdk/middleware-recursion-detection": "3.290.0", - "@aws-sdk/middleware-retry": "3.290.0", - "@aws-sdk/middleware-sdk-sts": "3.290.0", - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/middleware-user-agent": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/node-http-handler": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/smithy-client": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", - "@aws-sdk/util-body-length-browser": "3.188.0", - "@aws-sdk/util-body-length-node": "3.208.0", - "@aws-sdk/util-defaults-mode-browser": "3.290.0", - "@aws-sdk/util-defaults-mode-node": "3.290.0", - "@aws-sdk/util-endpoints": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", - "@aws-sdk/util-user-agent-browser": "3.290.0", - "@aws-sdk/util-user-agent-node": "3.290.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/credential-provider-node": "3.293.0", + "@aws-sdk/fetch-http-handler": "3.292.0", + "@aws-sdk/hash-node": "3.292.0", + "@aws-sdk/invalid-dependency": "3.292.0", + "@aws-sdk/middleware-content-length": "3.292.0", + "@aws-sdk/middleware-endpoint": "3.292.0", + "@aws-sdk/middleware-host-header": "3.292.0", + "@aws-sdk/middleware-logger": "3.292.0", + "@aws-sdk/middleware-recursion-detection": "3.292.0", + "@aws-sdk/middleware-retry": "3.293.0", + "@aws-sdk/middleware-sdk-sts": "3.292.0", + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/middleware-signing": "3.292.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/middleware-user-agent": "3.293.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/node-http-handler": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/smithy-client": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": 
"3.292.0", + "@aws-sdk/util-base64": "3.292.0", + "@aws-sdk/util-body-length-browser": "3.292.0", + "@aws-sdk/util-body-length-node": "3.292.0", + "@aws-sdk/util-defaults-mode-browser": "3.292.0", + "@aws-sdk/util-defaults-mode-node": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", + "@aws-sdk/util-retry": "3.292.0", + "@aws-sdk/util-user-agent-browser": "3.292.0", + "@aws-sdk/util-user-agent-node": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "fast-xml-parser": "4.1.2", "tslib": "^2.3.1" }, @@ -1983,14 +376,14 @@ } }, "@aws-sdk/config-resolver": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.290.0.tgz", - "integrity": "sha512-Ovskri6IR4iBK0+3ttgjPSgOUEC+fd5tqRN5JlPCCZ9VwqwF/z26yYC4fAPaMUAJwPVRFeYYzQoszXGoxPyG7g==", - "requires": { - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.292.0.tgz", + "integrity": "sha512-cB3twnNR7vYvlt2jvw8VlA1+iv/tVzl+/S39MKqw2tepU+AbJAM0EHwb/dkf1OKSmlrnANXhshx80MHF9zL4mA==", + "requires": { + "@aws-sdk/signature-v4": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-config-provider": "3.292.0", + "@aws-sdk/util-middleware": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2002,12 +395,12 @@ } }, "@aws-sdk/credential-provider-env": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.290.0.tgz", - "integrity": "sha512-gWsllElBm4DWZcc42Zb6sxaw77KBf6cY9iEezbVzVbJioqR9hIr1Pq3Nx30z1Q+1KiHSnt/Wl9cYYHOoNw2DnQ==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-env/-/credential-provider-env-3.292.0.tgz", + "integrity": "sha512-YbafSG0ZEKE2969CJWVtUhh3hfOeLPecFVoXOtegCyAJgY5Ghtu4TsVhL4DgiGAgOC30ojAmUVQEXzd7xJF5xA==", "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2019,14 +412,14 @@ } }, "@aws-sdk/credential-provider-imds": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.290.0.tgz", - "integrity": "sha512-PkYEs7zzUVWnhkR9TlU1ORDcCnkD7qoqR1loXXSZc+EIOX9M7f+sXGLtCXVl9wV1Ekx3a5Tjud+aQcOJjjFePA==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.292.0.tgz", + "integrity": "sha512-W/peOgDSRYulgzFpUhvgi1pCm6piBz6xrVN17N4QOy+3NHBXRVMVzYk6ct2qpLPgJUSEZkcpP+Gds+bBm8ed1A==", + "requires": { + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2038,18 +431,18 @@ } }, "@aws-sdk/credential-provider-ini": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.290.0.tgz", - "integrity": "sha512-n3OGvkvNgMS6Kb2fuFrmNeCI8CP7DGOsEvcfYPMiXsQWx9hHAh/XIv7ksD3TL5Mn8Dr0NHmB6uY5WgUZDatqfw==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - 
"@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - "@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.293.0.tgz", + "integrity": "sha512-Cy32aGm8Qc70Jc7VjcaxAEBfhLCS6/iewX4ZSI6MRoo0NrggnIwD9pdtO0Y0eqzEHXJvl2bycXFTJPmW4AzQIA==", + "requires": { + "@aws-sdk/credential-provider-env": "3.292.0", + "@aws-sdk/credential-provider-imds": "3.292.0", + "@aws-sdk/credential-provider-process": "3.292.0", + "@aws-sdk/credential-provider-sso": "3.293.0", + "@aws-sdk/credential-provider-web-identity": "3.292.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2061,19 +454,19 @@ } }, "@aws-sdk/credential-provider-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.290.0.tgz", - "integrity": "sha512-snLmeD7yAYq1x7lngCTM1VGmHYCZ4iUW5JRG9XPr7Npl7VWVdnNqaf5XBYEANgaFoWxjN3dNyDPg05+5Ew6QCA==", - "requires": { - "@aws-sdk/credential-provider-env": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/credential-provider-ini": "3.290.0", - "@aws-sdk/credential-provider-process": "3.290.0", - "@aws-sdk/credential-provider-sso": "3.290.0", - "@aws-sdk/credential-provider-web-identity": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.293.0.tgz", + "integrity": "sha512-w6NuuEiVZ5Ja2fmXbo5GiH2cykKw682HvL6bZ5Yhdj27twFL+4jUuXONxibQkXgTJbtiTx3tlcdLOa67RDq8ow==", + "requires": { + "@aws-sdk/credential-provider-env": "3.292.0", + "@aws-sdk/credential-provider-imds": "3.292.0", + "@aws-sdk/credential-provider-ini": "3.293.0", + "@aws-sdk/credential-provider-process": "3.292.0", + "@aws-sdk/credential-provider-sso": "3.293.0", + "@aws-sdk/credential-provider-web-identity": "3.292.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2085,13 +478,13 @@ } }, "@aws-sdk/credential-provider-process": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.290.0.tgz", - "integrity": "sha512-PNnWDYSaE8dMepH59cyrXs45Ucdmzdnyuhcn/fVwQ0Nc7FzESxw1G7SgJZhYF4tMRDiepu6lbFEN0QXsTIM8Iw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-process/-/credential-provider-process-3.292.0.tgz", + "integrity": "sha512-CFVXuMuUvg/a4tknzRikEDwZBnKlHs1LZCpTXIGjBdUTdosoi4WNzDLzGp93ZRTtcgFz+4wirz2f7P3lC0NrQw==", "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2103,15 +496,15 @@ } }, "@aws-sdk/credential-provider-sso": { - "version": "3.290.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.290.0.tgz", - "integrity": "sha512-tX5Ez3EiMrXDx6Vsn2gMq7ga3y4iyPneenCNToRUlmZrhF61DhMfA22gRwdwuP8hlFKXY4LRg51pBfJeq0ga8w==", - "requires": { - "@aws-sdk/client-sso": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/token-providers": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.293.0.tgz", + "integrity": "sha512-XdZW6mgAcV20AXrQ3FYKVZAO8LuFZwZnEf34Xc1Z2MuHkbSXxixPDu+mqbUKMwru1rmy6YaZ0eNuIbZYVCq0mw==", + "requires": { + "@aws-sdk/client-sso": "3.293.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/token-providers": "3.293.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2123,12 +516,12 @@ } }, "@aws-sdk/credential-provider-web-identity": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.290.0.tgz", - "integrity": "sha512-Apv6AnYtb5LTUreDVsqlXFNgiU0TQAZ8sfPg23pGrBGZvZU3KfDhF9n5j0i9Uca44O+/vB7UvbbvNAZS200vsQ==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.292.0.tgz", + "integrity": "sha512-4DbtIEM9gGVfqYlMdYXg3XY+vBhemjB1zXIequottW8loLYM8Vuz4/uGxxKNze6evVVzowsA0wKrYclE1aj/Rg==", "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2140,14 +533,14 @@ } }, "@aws-sdk/fetch-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.290.0.tgz", - "integrity": "sha512-hehbIxcqyJeiUBTbbP3C4tmY2p9UIh7bnLTKhocqaUcdEXQwlIRiQlnnA+TrQ5Uyoe+W3fAmv25tq08rB9ddhw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-base64": "3.208.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/fetch-http-handler/-/fetch-http-handler-3.292.0.tgz", + "integrity": "sha512-zh3bhUJbL8RSa39ZKDcy+AghtUkIP8LwcNlwRIoxMQh3Row4D1s4fCq0KZCx98NJBEXoiTLyTQlZxxI//BOb1Q==", + "requires": { + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/querystring-builder": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-base64": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2159,13 +552,13 @@ } }, "@aws-sdk/hash-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.290.0.tgz", - "integrity": "sha512-ayqJBOPoMa3H3eUhZHPu9ikNjoydu3nxj+R6tp8nMrKfFYDUu0XCdkpB0Wk/EBpMyWA2ZeyyfgXEUtQkqkAWBA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/hash-node/-/hash-node-3.292.0.tgz", + "integrity": "sha512-1yLxmIsvE+eK36JXEgEIouTITdykQLVhsA5Oai//Lar6Ddgu1sFpLDbdkMtKbrh4I0jLN9RacNCkeVQjZPTCCQ==", "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-buffer-from": "3.208.0", - "@aws-sdk/util-utf8": "3.254.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-buffer-from": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2177,11 +570,11 @@ } }, "@aws-sdk/invalid-dependency": { - "version": "3.290.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.290.0.tgz", - "integrity": "sha512-plJpEJ+PPTrpaMfg5KKsAfdXUi6iUZTc/PgP0/CPqCe3kuiWb1xb2GeTxOL5InzfBffVdHWeTanYu9+V0MIxVw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/invalid-dependency/-/invalid-dependency-3.292.0.tgz", + "integrity": "sha512-39OUV78CD3TmEbjhpt+V+Fk4wAGWhixqHxDSN8+4WL0uB4Fl7k5m3Z9hNY78AttHQSl2twR7WtLztnXPAFsriw==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2193,9 +586,9 @@ } }, "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.292.0.tgz", + "integrity": "sha512-kW/G5T/fzI0sJH5foZG6XJiNCevXqKLxV50qIT4B1pMuw7regd4ALIy0HwSqj1nnn9mSbRWBfmby0jWCJsMcwg==", "requires": { "tslib": "^2.3.1" }, @@ -2208,12 +601,12 @@ } }, "@aws-sdk/middleware-content-length": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.290.0.tgz", - "integrity": "sha512-9I+vnGSe/S0U98ZRCbOAdQngYfO7kYvXb5gjjX08XUQDfbB+ooIM1VdKngHhzUCTAs48z/43PzpBCjGJvGjB9w==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-content-length/-/middleware-content-length-3.292.0.tgz", + "integrity": "sha512-2gMWzQus5mj14menolpPDbYBeaOYcj7KNFZOjTjjI3iQ0KqyetG6XasirNrcJ/8QX1BRmpTol8Xjp2Ue3Gbzwg==", "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2225,103 +618,20 @@ } }, "@aws-sdk/middleware-endpoint": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint/-/middleware-endpoint-3.290.0.tgz", - "integrity": "sha512-A7wIujIHHoQaQaqjlRynqoZ3S4S8ExYDReXUBwf4Dzx0wZ5A50owLMY9MKFyd9uukirZs8mDnPPYZuwUI4wR7w==", - "requires": { - "@aws-sdk/middleware-serde": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-endpoint/-/middleware-endpoint-3.292.0.tgz", + "integrity": "sha512-cPMkiSxpZGG6tYlW4OS+ucS6r43f9ddX9kcUoemJCY10MOuogdPjulCAjE0HTs2PLKSOrrG4CTP4Q4wWDrH4Bw==", + "requires": { + "@aws-sdk/middleware-serde": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/signature-v4": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/url-parser": "3.292.0", + "@aws-sdk/util-config-provider": "3.292.0", + "@aws-sdk/util-middleware": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/middleware-serde": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.290.0.tgz", - 
"integrity": "sha512-lZCKlfJzosi3cVx02RKRTVvbAijHTfd16EiSyKRsQOF2rCu7Qt4LzygIlqUonCeHG6eSqOMMf7LAJ22IHafBbw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", - "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -2330,12 +640,12 @@ } }, "@aws-sdk/middleware-host-header": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.290.0.tgz", - "integrity": "sha512-j1ss8pjSJyG0aB+X0VPYgTfoieB8m5c+PrWw85JRM/qgbQeurkyD3d/F00V1hkZI42gygOaPlmYMik3kQnmITw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-host-header/-/middleware-host-header-3.292.0.tgz", + "integrity": "sha512-mHuCWe3Yg2S5YZ7mB7sKU6C97XspfqrimWjMW9pfV2usAvLA3R0HrB03jpR5vpZ3P4q7HB6wK3S6CjYMGGRNag==", "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, 
"dependencies": { @@ -2347,11 +657,11 @@ } }, "@aws-sdk/middleware-logger": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.290.0.tgz", - "integrity": "sha512-wJOK31t/Y/Km6B5ULF/k2RmQB/6MXSN/hMuCiYsLMapFT+86mBlY8cXytYXtLS8afRKpuNy29EY+O6ovfjz6Ig==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-logger/-/middleware-logger-3.292.0.tgz", + "integrity": "sha512-yZNY1XYmG3NG+uonET7jzKXNiwu61xm/ZZ6i/l51SusuaYN+qQtTAhOFsieQqTehF9kP4FzbsWgPDwD8ZZX9lw==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2363,32 +673,15 @@ } }, "@aws-sdk/middleware-recursion-detection": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.290.0.tgz", - "integrity": "sha512-m8Y7SE3NfVTyGubiRhueyHF7uqC5dCbD1bSLgVjvrSjO2yEL0Dv9QR1ad7a+p5ilS+Fq3RnOu1VeujfTHy0qRQ==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.292.0.tgz", + "integrity": "sha512-kA3VZpPko0Zqd7CYPTKAxhjEv0HJqFu2054L04dde1JLr43ro+2MTdX7vsHzeAFUVRphqatFFofCumvXmU6Mig==", "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -2397,15 +690,15 @@ } }, "@aws-sdk/middleware-retry": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.290.0.tgz", - "integrity": "sha512-mvXvYd/3L/f5ZcnFI1Q2hwk0OtzKMmkDfWW1BcoVzK0XHf2aeehbs7xgI92ICEi/5Ali0IG5krv5LqfgO154Sw==", - "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/service-error-classification": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-retry": "3.290.0", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-retry/-/middleware-retry-3.293.0.tgz", + "integrity": "sha512-7tiaz2GzRecNHaZ6YnF+Nrtk3au8qF6oiipf11R7MJiqJ0fkMLnz/iRrlakDziS9qF/a9v+3yxb4W4NHK3f4Tw==", + "requires": { + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/service-error-classification": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-middleware": "3.292.0", + "@aws-sdk/util-retry": "3.292.0", "tslib": "^2.3.1", "uuid": "^8.3.2" }, @@ -2423,15 +716,15 @@ } }, "@aws-sdk/middleware-sdk-sts": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.290.0.tgz", - "integrity": "sha512-NaYnDhFtmz/e9jNBNeY10A4AldCvjF46ZfeIWoBUsk/4qDlSP9kaCjTufEjNf/zMTtYzGiP/FUtLS7T6tfXdoQ==", - "requires": { - 
"@aws-sdk/middleware-signing": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.292.0.tgz", + "integrity": "sha512-GN5ZHEqXZqDi+HkVbaXRX9HaW/vA5rikYpWKYsmxTUZ7fB7ijvEO3co3lleJv2C+iGYRtUIHC4wYNB5xgoTCxg==", + "requires": { + "@aws-sdk/middleware-signing": "3.292.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/signature-v4": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2443,11 +736,11 @@ } }, "@aws-sdk/middleware-serde": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.290.0.tgz", - "integrity": "sha512-lZCKlfJzosi3cVx02RKRTVvbAijHTfd16EiSyKRsQOF2rCu7Qt4LzygIlqUonCeHG6eSqOMMf7LAJ22IHafBbw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-serde/-/middleware-serde-3.292.0.tgz", + "integrity": "sha512-6hN9mTQwSvV8EcGvtXbS/MpK7WMCokUku5Wu7X24UwCNMVkoRHLIkYcxHcvBTwttuOU0d8hph1/lIX4dkLwkQw==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2459,15 +752,15 @@ } }, "@aws-sdk/middleware-signing": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.290.0.tgz", - "integrity": "sha512-mEJZQrbXkOTI+BdFlpAd1CleVJL8B7qayANMNj9nrZqvZ7HzVDLEkNaJqFz9JFizYTfZC2ZjtATPrSiYDvFEfg==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-middleware": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-signing/-/middleware-signing-3.292.0.tgz", + "integrity": "sha512-GVfoSjDjEQ4TaO6x9MffyP3uRV+2KcS5FtexLCYOM9pJcnE9tqq9FJOrZ1xl1g+YjUVKxo4x8lu3tpEtIb17qg==", + "requires": { + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/signature-v4": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-middleware": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2479,9 +772,9 @@ } }, "@aws-sdk/middleware-stack": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.290.0.tgz", - "integrity": "sha512-25iC/7oAokRfxixGkDjBSIAkNwtx2kcO+xMoDczFus9UrlOr2pBY0IXbPn6bB56q2zwsBTHcmMTn0H7FJSIQmw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-stack/-/middleware-stack-3.292.0.tgz", + "integrity": "sha512-WdQpRkuMysrEwrkByCM1qCn2PPpFGGQ2iXqaFha5RzCdZDlxJni9cVNb6HzWUcgjLEYVTXCmOR9Wxm3CNW44Qg==", "requires": { "tslib": "^2.3.1" }, @@ -2494,12 +787,13 @@ } }, "@aws-sdk/middleware-user-agent": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.290.0.tgz", - "integrity": "sha512-ZR49PPra3LtqZBmXAtV8YrUSrkVG0hPBICE8cma/wMwbKGHa0G+Xu4pOZP0oQXs5XeGu1cs/Nx3AOJ2fgaMjhQ==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.293.0.tgz", + "integrity": "sha512-gZ7/e6XwpKk9mvgA78q4Ffc796jTn02TUKx2qMDnkLVbeJXBNN2jnvYEKq8v70+o7fd/ALRudg8gBDmkkhM/Hw==", "requires": { - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/types": 
"3.290.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-endpoints": "3.293.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2511,13 +805,13 @@ } }, "@aws-sdk/node-config-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.290.0.tgz", - "integrity": "sha512-dQLnyCy5iT7Q5Ot2JOciNH9WkaixWwmEnvW6nBa6febzAYZVy78sfJOOP1EZ7ClG1aIhrsAN7/7wPebpn/Peiw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.292.0.tgz", + "integrity": "sha512-S3NnC9dQ5GIbJYSDIldZb4zdpCOEua1tM7bjYL3VS5uqCEM93kIi/o/UkIUveMp/eqTS2LJa5HjNIz5Te6je0A==", "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2529,14 +823,14 @@ } }, "@aws-sdk/node-http-handler": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.290.0.tgz", - "integrity": "sha512-HfzuzdpAJpO/ob9DQ3aEB/WmPCS5vZOic9T4TtSCmRd5e3+xdMtK/MQUizp8XkbUGBat7jPmcV13Gy4YmwfAuw==", - "requires": { - "@aws-sdk/abort-controller": "3.290.0", - "@aws-sdk/protocol-http": "3.290.0", - "@aws-sdk/querystring-builder": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/node-http-handler/-/node-http-handler-3.292.0.tgz", + "integrity": "sha512-L/E3UDSwXLXjt1XWWh0RBD55F+aZI1AEdPwdES9i1PjnZLyuxuDhEDptVibNN56+I9/4Q3SbmuVRVlOD0uzBag==", + "requires": { + "@aws-sdk/abort-controller": "3.292.0", + "@aws-sdk/protocol-http": "3.292.0", + "@aws-sdk/querystring-builder": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2548,11 +842,11 @@ } }, "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.292.0.tgz", + "integrity": "sha512-dHArSvsiqhno/g55N815gXmAMrmN8DP7OeFNqJ4wJG42xsF2PFN3DAsjIuHuXMwu+7A3R1LHqIpvv0hA9KeoJQ==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2564,11 +858,11 @@ } }, "@aws-sdk/protocol-http": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.290.0.tgz", - "integrity": "sha512-3VHbfmo7vaA/0ugJedjwyK85MT+OKQanz7ktUnAONH5KdG2/gPpa9ZSTyfK9kCVFin93YzC3pznZgr6oNYgGgg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/protocol-http/-/protocol-http-3.292.0.tgz", + "integrity": "sha512-NLi4fq3k41aXIh1I97yX0JTy+3p6aW1NdwFwdMa674z86QNfb4SfRQRZBQe9wEnAZ/eWHVnlKIuII+U1URk/Kg==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2580,12 +874,12 @@ } }, "@aws-sdk/querystring-builder": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.290.0.tgz", - "integrity": "sha512-7q8x8ux1RCUxUolqxsXfSbCObyMzvSwfJb9GgZ8rDi2U61l8W760a9ejHzizfQJvdldRSwFqmynkRAqYbvKixg==", + "version": 
"3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-builder/-/querystring-builder-3.292.0.tgz", + "integrity": "sha512-XElIFJaReIm24eEvBtV2dOtZvcm3gXsGu/ftG8MLJKbKXFKpAP1q+K6En0Bs7/T88voKghKdKpKT+eZUWgTqlg==", "requires": { - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-uri-escape": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2597,11 +891,11 @@ } }, "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.292.0.tgz", + "integrity": "sha512-iTYpYo7a8X9RxiPbjjewIpm6XQPx2EOcF1dWCPRII9EFlmZ4bwnX+PDI36fIo9oVs8TIKXmwNGODU9nsg7CSAw==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2613,16 +907,16 @@ } }, "@aws-sdk/service-error-classification": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.290.0.tgz", - "integrity": "sha512-QP+QgL5Gm6RKl4KGwTRyG1kw0SxBbcmp/a/yhywVHmRI0/+4VsL+cooTqtjFr3xVmKoCX+/JZZ8P96VGFvRSZA==" + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.292.0.tgz", + "integrity": "sha512-X1k3sixCeC45XSNHBe+kRBQBwPDyTFtFITb8O5Qw4dS9XWGhrUJT4CX0qE5aj8qP3F9U5nRizs9c2mBVVP0Caw==" }, "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.292.0.tgz", + "integrity": "sha512-Av2TTYg1Jig2kbkD56ybiqZJB6vVrYjv1W5UQwY/q3nA/T2mcrgQ20ByCOt5Bv9VvY7FSgC+znj+L4a7RLGmBg==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2634,16 +928,16 @@ } }, "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.292.0.tgz", + "integrity": "sha512-+rw47VY5mvBecn13tDQTl1ipGWg5tE63faWgmZe68HoBL87ZiDzsd7bUKOvjfW21iMgWlwAppkaNNQayYRb2zg==", + "requires": { + "@aws-sdk/is-array-buffer": "3.292.0", + "@aws-sdk/types": "3.292.0", + "@aws-sdk/util-hex-encoding": "3.292.0", + "@aws-sdk/util-middleware": "3.292.0", + "@aws-sdk/util-uri-escape": "3.292.0", + "@aws-sdk/util-utf8": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2655,12 +949,12 @@ } }, "@aws-sdk/smithy-client": { - "version": "3.290.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.290.0.tgz", - "integrity": "sha512-MDa+BJqM1FP2HYugVAscufoLJuapEdUTZPoyERVGfUEznKfKH33QXRoeqW1wzUNyhcxFONHLnXp1aYFBtnLx7g==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/smithy-client/-/smithy-client-3.292.0.tgz", + "integrity": "sha512-S8PKzjPkZ6SXYZuZiU787dMsvQ0d/LFEhw2OI4Oe2An9Fc2IwJ2FYukyHoQJOV2tV0DiuMebPo7eMyQyjKElvA==", "requires": { - "@aws-sdk/middleware-stack": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/middleware-stack": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2672,43 +966,17 @@ } }, "@aws-sdk/token-providers": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.290.0.tgz", - "integrity": "sha512-fc5y8WH7RVwoaUaEdK3cRanxgHShZKAPZ0rCtHjoLURF8IjZIrn3AaZqV8YTgAAmIKNVC+argpj1G+suqXEB/Q==", - "requires": { - "@aws-sdk/client-sso-oidc": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.293.0.tgz", + "integrity": "sha512-Ly5pdUZJcufNHTovmA0XjyUV6Qth89oK3VHSnrNbVYKFCDvApF4tuR8lBYayn7vEWrdlkGCnfJu42yN71NPfDw==", + "requires": { + "@aws-sdk/client-sso-oidc": "3.293.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/shared-ini-file-loader": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -2717,9 +985,9 @@ } }, "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.292.0.tgz", + "integrity": "sha512-1teYAY2M73UXZxMAxqZxVS2qwXjQh0OWtt7qyLfha0TtIk/fZ1hRwFgxbDCHUFcdNBSOSbKH/ESor90KROXLCQ==", "requires": { "tslib": "^2.3.1" }, @@ -2732,12 +1000,12 @@ } }, "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", + "version": "3.292.0", + "resolved": 
"https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.292.0.tgz", + "integrity": "sha512-NZeAuZCk1x6TIiWuRfbOU6wHPBhf0ly2qOHzWut4BCH+b4RrDmFF8EmXcH1auEfGhE7yRyR6XqIN0t3S+hYACA==", "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/querystring-parser": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2749,31 +1017,14 @@ } }, "@aws-sdk/util-base64": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-base64/-/util-base64-3.208.0.tgz", - "integrity": "sha512-PQniZph5A6N7uuEOQi+1hnMz/FSOK/8kMFyFO+4DgA1dZ5pcKcn5wiFwHkcTb/BsgVqQa3Jx0VHNnvhlS8JyTg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-base64/-/util-base64-3.292.0.tgz", + "integrity": "sha512-zjNCwNdy617yFvEjZorepNWXB2sQCVfsShCwFy/kIQ5iW5tT2jQKaqc0K77diU9atkooxw9p1W9m9sOgrkOFNw==", "requires": { - "@aws-sdk/util-buffer-from": "3.208.0", + "@aws-sdk/util-buffer-from": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -2782,9 +1033,9 @@ } }, "@aws-sdk/util-body-length-browser": { - "version": "3.188.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.188.0.tgz", - "integrity": "sha512-8VpnwFWXhnZ/iRSl9mTf+VKOX9wDE8QtN4bj9pBfxwf90H1X7E8T6NkiZD3k+HubYf2J94e7DbeHs7fuCPW5Qg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-browser/-/util-body-length-browser-3.292.0.tgz", + "integrity": "sha512-Wd/BM+JsMiKvKs/bN3z6TredVEHh2pKudGfg3CSjTRpqFpOG903KDfyHBD42yg5PuCHoHoewJvTPKwgn7/vhaw==", "requires": { "tslib": "^2.3.1" }, @@ -2797,9 +1048,9 @@ } }, "@aws-sdk/util-body-length-node": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.208.0.tgz", - "integrity": "sha512-3zj50e5g7t/MQf53SsuuSf0hEELzMtD8RX8C76f12OSRo2Bca4FLLYHe0TZbxcfQHom8/hOaeZEyTyMogMglqg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-body-length-node/-/util-body-length-node-3.292.0.tgz", + "integrity": "sha512-BBgipZ2P6RhogWE/qj0oqpdlyd3iSBYmb+aD/TBXwB2lA/X8A99GxweBd/kp06AmcJRoMS9WIXgbWkiiBlRlSA==", "requires": { "tslib": "^2.3.1" }, @@ -2812,11 +1063,11 @@ } }, "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.292.0.tgz", + "integrity": 
"sha512-RxNZjLoXNxHconH9TYsk5RaEBjSgTtozHeyIdacaHPj5vlQKi4hgL2hIfKeeNiAfQEVjaUFF29lv81xpNMzVMQ==", "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", + "@aws-sdk/is-array-buffer": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -2828,9 +1079,9 @@ } }, "@aws-sdk/util-config-provider": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-config-provider/-/util-config-provider-3.208.0.tgz", - "integrity": "sha512-DSRqwrERUsT34ug+anlMBIFooBEGwM8GejC7q00Y/9IPrQy50KnG5PW2NiTjuLKNi7pdEOlwTSEocJE15eDZIg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-config-provider/-/util-config-provider-3.292.0.tgz", + "integrity": "sha512-t3noYll6bPRSxeeNNEkC5czVjAiTPcsq00OwfJ2xyUqmquhLEfLwoJKmrT1uP7DjIEXdUtfoIQ2jWiIVm/oO5A==", "requires": { "tslib": "^2.3.1" }, @@ -2843,33 +1094,16 @@ } }, "@aws-sdk/util-defaults-mode-browser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-defaults-mode-browser/-/util-defaults-mode-browser-3.290.0.tgz", - "integrity": "sha512-8Mt6/OA465uw1wSA/LCCd+6IjeIUTAbg2GiqfSBCBMNJNuqPwPXuWVjg6kBd1eEChyEtAuoLTygMefaBywg4HQ==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-defaults-mode-browser/-/util-defaults-mode-browser-3.292.0.tgz", + "integrity": "sha512-7+zVUlMGfa8/KT++9humHo6IDxTnxMCmWUj5jVNlkpk6h7Ecmppf7aXotviyVIA43lhtz0p2AErs0N0ekEUK+w==", "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", "bowser": "^2.11.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -2878,136 +1112,18 @@ } }, "@aws-sdk/util-defaults-mode-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-defaults-mode-node/-/util-defaults-mode-node-3.290.0.tgz", - "integrity": "sha512-9c0jS7w1aZxfKkFXlTjp80QaKYKnutMmlsfP+/YXN9+s3yvwFcnsENMTNg5YVvkZa9e+Rhw/ySxVKTEJ7n/SOA==", - "requires": { - "@aws-sdk/config-resolver": "3.290.0", - "@aws-sdk/credential-provider-imds": "3.290.0", - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-defaults-mode-node/-/util-defaults-mode-node-3.292.0.tgz", + "integrity": "sha512-SSIw85eF4BVs0fOJRyshT+R3b/UmBPhiVKCUZm2rq6+lIGkDPiSwQU3d/80AhXtiL5SFT/IzAKKgQd8qMa7q3A==", + "requires": { + "@aws-sdk/config-resolver": "3.292.0", + "@aws-sdk/credential-provider-imds": "3.292.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/property-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/config-resolver": { - "version": "3.290.0", - "resolved": 
"https://registry.npmjs.org/@aws-sdk/config-resolver/-/config-resolver-3.290.0.tgz", - "integrity": "sha512-Ovskri6IR4iBK0+3ttgjPSgOUEC+fd5tqRN5JlPCCZ9VwqwF/z26yYC4fAPaMUAJwPVRFeYYzQoszXGoxPyG7g==", - "requires": { - "@aws-sdk/signature-v4": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-config-provider": "3.208.0", - "@aws-sdk/util-middleware": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/credential-provider-imds": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-imds/-/credential-provider-imds-3.290.0.tgz", - "integrity": "sha512-PkYEs7zzUVWnhkR9TlU1ORDcCnkD7qoqR1loXXSZc+EIOX9M7f+sXGLtCXVl9wV1Ekx3a5Tjud+aQcOJjjFePA==", - "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/url-parser": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/node-config-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/node-config-provider/-/node-config-provider-3.290.0.tgz", - "integrity": "sha512-dQLnyCy5iT7Q5Ot2JOciNH9WkaixWwmEnvW6nBa6febzAYZVy78sfJOOP1EZ7ClG1aIhrsAN7/7wPebpn/Peiw==", - "requires": { - "@aws-sdk/property-provider": "3.290.0", - "@aws-sdk/shared-ini-file-loader": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/property-provider": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/property-provider/-/property-provider-3.290.0.tgz", - "integrity": "sha512-2Zrh6/KecmiZ/cKVaeDtHRAfyOnAEfwJsgxfFugs3RxjJtYr0AbYJTF+mYp3f8Xc7DCjdxR055axo9TCTBSrwg==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/querystring-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/querystring-parser/-/querystring-parser-3.290.0.tgz", - "integrity": "sha512-8QPDihJKSFYFphxUl5+FfXMQowhAoHuDeoqd1ce3byL0bm7k8emcGfiYD6QGxuDlpno+F4O1/Mz+e+cwNCdPVA==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/shared-ini-file-loader": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/shared-ini-file-loader/-/shared-ini-file-loader-3.290.0.tgz", - "integrity": "sha512-kvLW5rwr4lwHdwkYnoHYpFVfWwZYwQO44eRnkrDnyvvhZTcCH3rBLApu6uvomnL+Ep4bEJ1anDKt3WywlGg5Qw==", - "requires": { - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/signature-v4": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/signature-v4/-/signature-v4-3.290.0.tgz", - "integrity": "sha512-SUMflc8b8PC0ITV3AdYBSlTcn4oFjumBAPNNXBLKIpifQ1l7ZufFIulDPlqeouXTDwsuCVINAwE0DbItDe/7Qw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "@aws-sdk/types": "3.290.0", - "@aws-sdk/util-hex-encoding": "3.201.0", - "@aws-sdk/util-middleware": "3.290.0", - "@aws-sdk/util-uri-escape": "3.201.0", - "@aws-sdk/util-utf8": "3.254.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, 
- "@aws-sdk/url-parser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/url-parser/-/url-parser-3.290.0.tgz", - "integrity": "sha512-19EAlyH4LyNMbAROE6KSuhFKhOwl67kciDavPjS8gFiHr6slon3oqXfz10+uzKf/pJKuY6qOpkUb9h7LnF4bFQ==", - "requires": { - "@aws-sdk/querystring-parser": "3.290.0", - "@aws-sdk/types": "3.290.0", - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -3016,22 +1132,14 @@ } }, "@aws-sdk/util-endpoints": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.290.0.tgz", - "integrity": "sha512-nDdSyWdxYEPE84qABQKasIFhm6oWjhiyM92g8zsHTqzrn67a4caA72FTL6cztgJOEd5GWvHn6r1BnRVhkG68Qw==", + "version": "3.293.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-endpoints/-/util-endpoints-3.293.0.tgz", + "integrity": "sha512-R/99aNV49Refpv5guiUjEUrZYlvnfaNBniB+/ZtMO3ixxUopapssCrUivuJrmhccmrYaTCZw7dRzIWjU1jJhKg==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/types": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.290.0.tgz", - "integrity": "sha512-uQLD9tLv8Q87CwrSB/taUoQ8wkGeFb1Gygc+kt5oClfMFP9HYzu944kW/1R7/J5LtBLT1QFYccd4gz6eOUNlsw==", - "requires": { - "tslib": "^2.3.1" - } - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -3040,9 +1148,9 @@ } }, "@aws-sdk/util-hex-encoding": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.201.0.tgz", - "integrity": "sha512-7t1vR1pVxKx0motd3X9rI3m/xNp78p3sHtP5yo4NP4ARpxyJ0fokBomY8ScaH2D/B+U5o9ARxldJUdMqyBlJcA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-hex-encoding/-/util-hex-encoding-3.292.0.tgz", + "integrity": "sha512-qBd5KFIUywQ3qSSbj814S2srk0vfv8A6QMI+Obs1y2LHZFdQN5zViptI4UhXhKOHe+NnrHWxSuLC/LMH6q3SmA==", "requires": { "tslib": "^2.3.1" }, @@ -3055,24 +1163,24 @@ } }, "@aws-sdk/util-locate-window": { - "version": "3.23.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.23.0.tgz", - "integrity": "sha512-mM8kWW7SWIxCshkNllpYqCQi5SzwJ+sv5nURhtquOB5/H3qGqZm0V5lUE3qpE1AYmqKwk6qbGUy1woFn1T5nrw==", + "version": "3.295.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-locate-window/-/util-locate-window-3.295.0.tgz", + "integrity": "sha512-d/s+zhUx5Kh4l/ecMP/TBjzp1GR/g89Q4nWH6+wH5WgdHsK+LG+vmsk6mVNuP/8wsCofYG4NBqp5Ulbztbm9QA==", "requires": { - "tslib": "^2.3.0" + "tslib": "^2.5.0" }, "dependencies": { "tslib": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", - "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==" + "version": 
"2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" } } }, "@aws-sdk/util-middleware": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-middleware/-/util-middleware-3.290.0.tgz", - "integrity": "sha512-lXGM9YSqwZgCeEPltc++jiGyZ/FLuh62IjrWSIVSL/FvkL6D8KSKNBd7Ab/KDDu5jt4iP5UZ4k3SGVk6monUZg==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-middleware/-/util-middleware-3.292.0.tgz", + "integrity": "sha512-KjhS7flfoBKDxbiBZjLjMvEizXgjfQb7GQEItgzGoI9rfGCmZtvqCcqQQoIlxb8bIzGRggAUHtBGWnlLbpb+GQ==", "requires": { "tslib": "^2.3.1" }, @@ -3085,19 +1193,14 @@ } }, "@aws-sdk/util-retry": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-retry/-/util-retry-3.290.0.tgz", - "integrity": "sha512-UjyUEguu2upaBvDJkeSUQPE4ryBTA7JhPyl6M7XA6rFSRtU5+1NI8KknSNw46buviNit0Yu0E6TzxNQyS70hKA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-retry/-/util-retry-3.292.0.tgz", + "integrity": "sha512-JEHyF7MpVeRF5uR4LDYgpOKcFpOPiAj8TqN46SVOQQcL1K+V7cSr7O7N7J6MwJaN9XOzAcBadeIupMm7/BFbgw==", "requires": { - "@aws-sdk/service-error-classification": "3.290.0", + "@aws-sdk/service-error-classification": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { - "@aws-sdk/service-error-classification": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/service-error-classification/-/service-error-classification-3.290.0.tgz", - "integrity": "sha512-QP+QgL5Gm6RKl4KGwTRyG1kw0SxBbcmp/a/yhywVHmRI0/+4VsL+cooTqtjFr3xVmKoCX+/JZZ8P96VGFvRSZA==" - }, "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -3106,9 +1209,9 @@ } }, "@aws-sdk/util-uri-escape": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.201.0.tgz", - "integrity": "sha512-TeTWbGx4LU2c5rx0obHeDFeO9HvwYwQtMh1yniBz00pQb6Qt6YVOETVQikRZ+XRQwEyCg/dA375UplIpiy54mA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-uri-escape/-/util-uri-escape-3.292.0.tgz", + "integrity": "sha512-hOQtUMQ4VcQ9iwKz50AoCp1XBD5gJ9nly/gJZccAM7zSA5mOO8RRKkbdonqquVHxrO0CnYgiFeCh3V35GFecUw==", "requires": { "tslib": "^2.3.1" }, @@ -3121,11 +1224,11 @@ } }, "@aws-sdk/util-user-agent-browser": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.290.0.tgz", - "integrity": "sha512-I+B5ooKRYQ9jHcdg7TOf20LlTfcBUlCJQ2AAqI1ukmJqal22OD1CtC1E+/XbplpU5mxRs4s2UQbxNaPA0yIrBA==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.292.0.tgz", + "integrity": "sha512-dld+lpC3QdmTQHdBWJ0WFDkXDSrJgfz03q6mQ8+7H+BC12ZhT0I0g9iuvUjolqy7QR00OxOy47Y9FVhq8EC0Gg==", "requires": { - "@aws-sdk/types": "3.290.0", + "@aws-sdk/types": "3.292.0", "bowser": "^2.11.0", "tslib": "^2.3.1" }, @@ -3138,12 +1241,12 @@ } }, "@aws-sdk/util-user-agent-node": { - "version": "3.290.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.290.0.tgz", - "integrity": "sha512-7juKgEMqpa0il6jZmiBKGDJslM4UIKX1bvhlqkSvvPfV3zFdfi0V2xavh68GfelWduBBkYLGRjsLunqzw64f8A==", + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.292.0.tgz", + "integrity": 
"sha512-f+NfIMal5E61MDc5WGhUEoicr7b1eNNhA+GgVdSB/Hg5fYhEZvFK9RZizH5rrtsLjjgcr9nPYSR7/nDKCJLumw==", "requires": { - "@aws-sdk/node-config-provider": "3.290.0", - "@aws-sdk/types": "3.290.0", + "@aws-sdk/node-config-provider": "3.292.0", + "@aws-sdk/types": "3.292.0", "tslib": "^2.3.1" }, "dependencies": { @@ -3154,32 +1257,15 @@ } } }, - "@aws-sdk/util-utf8": { - "version": "3.254.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8/-/util-utf8-3.254.0.tgz", - "integrity": "sha512-14Kso/eIt5/qfIBmhEL9L1IfyUqswjSTqO2mY7KOzUZ9SZbwn3rpxmtkhmATkRjD7XIlLKaxBkI7tU9Zjzj8Kw==", - "requires": { - "@aws-sdk/util-buffer-from": "3.208.0", - "tslib": "^2.3.1" - }, - "dependencies": { - "@aws-sdk/is-array-buffer": { - "version": "3.201.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/is-array-buffer/-/is-array-buffer-3.201.0.tgz", - "integrity": "sha512-UPez5qLh3dNgt0DYnPD/q0mVJY84rA17QE26hVNOW3fAji8W2wrwrxdacWOxyXvlxWsVRcKmr+lay1MDqpAMfg==", - "requires": { - "tslib": "^2.3.1" - } - }, - "@aws-sdk/util-buffer-from": { - "version": "3.208.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-buffer-from/-/util-buffer-from-3.208.0.tgz", - "integrity": "sha512-7L0XUixNEFcLUGPeBF35enCvB9Xl+K6SQsmbrPk1P3mlV9mguWSDQqbOBwY1Ir0OVbD6H/ZOQU7hI/9RtRI0Zw==", - "requires": { - "@aws-sdk/is-array-buffer": "3.201.0", - "tslib": "^2.3.1" - } - }, + "@aws-sdk/util-utf8": { + "version": "3.292.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8/-/util-utf8-3.292.0.tgz", + "integrity": "sha512-FPkj+Z59/DQWvoVu2wFaRncc3KVwe/pgK3MfVb0Lx+Ibey5KUx+sNpJmYcVYHUAe/Nv/JeIpOtYuC96IXOnI6w==", + "requires": { + "@aws-sdk/util-buffer-from": "3.292.0", + "tslib": "^2.3.1" + }, + "dependencies": { "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", @@ -3188,17 +1274,17 @@ } }, "@aws-sdk/util-utf8-browser": { - "version": "3.23.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.23.0.tgz", - "integrity": "sha512-fSB95AKnvCnAbCd7o0xLbErfAgD9wnLCaEu23AgfGAiaG3nFF8Z2+wtjebU/9Z4RI9d/x83Ho/yguRnJdkMsPA==", + "version": "3.259.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz", + "integrity": "sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==", "requires": { - "tslib": "^2.3.0" + "tslib": "^2.3.1" }, "dependencies": { "tslib": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.0.tgz", - "integrity": "sha512-N82ooyxVNm6h1riLCoyS9e3fuJ3AMG2zIZs2Gd1ATcSFjSA23Q0fzjjZeh0jbJvWVDZ0cJT8yaNNaaXHzueNjg==" + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" } } }, @@ -3275,12 +1361,6 @@ "ms": "2.1.2" } }, - "json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -3907,6 +1987,24 @@ "version": "1.2.6", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "tar": { + "version": "6.1.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", + 
"integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", + "requires": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^3.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, @@ -4049,6 +2147,12 @@ "integrity": "sha512-QD1PhQk+s31P1ixsX0H0Suoupp3VMXzIVMSwobR3F3MSUO2YCV0B7xqLcUw/Bh8yuvd3LhpyqLQWTNcRmp6IdQ==", "dev": true }, + "@hapi/bourne": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", + "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==", + "dev": true + }, "@hapi/hoek": { "version": "9.2.0", "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-9.2.0.tgz", @@ -4066,12 +2170,6 @@ "@hapi/topo": "3.x.x" }, "dependencies": { - "@hapi/bourne": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/@hapi/bourne/-/bourne-1.3.2.tgz", - "integrity": "sha512-1dVNHT76Uu5N3eJNTYcvxee+jzX4Z9lfciqRRHCU27ihbUcYi+iSc2iml5Ke1LXe1SyJCLA0+14Jh4tXJgOppA==", - "dev": true - }, "@hapi/hoek": { "version": "8.5.1", "resolved": "https://registry.npmjs.org/@hapi/hoek/-/hoek-8.5.1.tgz", @@ -4618,7 +2716,6 @@ "version": "1.0.10", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz", "integrity": "sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==", - "dev": true, "requires": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", @@ -4634,14 +2731,12 @@ "detect-libc": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", - "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", - "dev": true + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==" }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "requires": { "yallist": "^4.0.0" } @@ -4650,7 +2745,6 @@ "version": "7.3.8", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.8.tgz", "integrity": "sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==", - "dev": true, "requires": { "lru-cache": "^6.0.0" } @@ -4658,8 +2752,7 @@ "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, @@ -4982,208 +3075,6 @@ } } }, - "@serverless/utils": { - "version": "6.8.2", - "resolved": "https://registry.npmjs.org/@serverless/utils/-/utils-6.8.2.tgz", - "integrity": "sha512-FW8zdG8OPoF6qgyutiMhz4m/5SxbQjoQdbaGcW3wU6xe3QzQh41Hif7I3Xuu4J62CvxiWuz19sxNDJz2mTcskw==", - "dev": true, - "requires": { - "archive-type": "^4.0.0", - "chalk": "^4.1.2", - "ci-info": "^3.5.0", - "cli-progress-footer": "^2.3.2", - "content-disposition": "^0.5.4", - "d": "^1.0.1", - "decompress": 
"^4.2.1", - "event-emitter": "^0.3.5", - "ext": "^1.7.0", - "ext-name": "^5.0.0", - "file-type": "^16.5.4", - "filenamify": "^4.3.0", - "get-stream": "^6.0.1", - "got": "^11.8.5", - "inquirer": "^8.2.5", - "js-yaml": "^4.1.0", - "jwt-decode": "^3.1.2", - "lodash": "^4.17.21", - "log": "^6.3.1", - "log-node": "^8.0.3", - "make-dir": "^3.1.0", - "memoizee": "^0.4.15", - "ncjsm": "^4.3.1", - "node-fetch": "^2.6.7", - "open": "^8.4.0", - "p-event": "^4.2.0", - "supports-color": "^8.1.1", - "timers-ext": "^0.1.7", - "type": "^2.7.2", - "uni-global": "^1.0.0", - "uuid": "^8.3.2", - "write-file-atomic": "^4.0.2" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dev": true, - "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "dependencies": { - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } - } - }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "inquirer": { - "version": "8.2.5", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.5.tgz", - "integrity": "sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ==", - "dev": true, - "requires": { - "ansi-escapes": "^4.2.1", - "chalk": "^4.1.1", - "cli-cursor": "^3.1.0", - "cli-width": "^3.0.0", - "external-editor": "^3.0.3", - "figures": "^3.0.0", - "lodash": "^4.17.21", - "mute-stream": "0.0.8", - "ora": "^5.4.1", - "run-async": "^2.4.0", - "rxjs": "^7.5.5", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0", - "through": "^2.3.6", - "wrap-ansi": 
"^7.0.0" - } - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - } - }, - "open": { - "version": "8.4.1", - "resolved": "https://registry.npmjs.org/open/-/open-8.4.1.tgz", - "integrity": "sha512-/4b7qZNhv6Uhd7jjnREh1NjnPxlTq+XNWPG88Ydkj5AILcA5m3ajvcg57pB24EQjKv0dK62XnDqk9c/hkIG5Kg==", - "dev": true, - "requires": { - "define-lazy-prop": "^2.0.0", - "is-docker": "^2.1.1", - "is-wsl": "^2.2.0" - } - }, - "rxjs": { - "version": "7.8.0", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.0.tgz", - "integrity": "sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==", - "dev": true, - "requires": { - "tslib": "^2.1.0" - } - }, - "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - }, - "tslib": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", - "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==", - "dev": true - }, - "uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true - }, - "wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - } - }, - "write-file-atomic": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", - "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", - "dev": true, - "requires": { - "imurmurhash": "^0.1.4", - "signal-exit": "^3.0.7" - } - } - } - }, "@sideway/address": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/@sideway/address/-/address-4.1.2.tgz", @@ -5193,9 +3084,9 @@ } }, "@sideway/formula": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.1.tgz", - "integrity": "sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==" + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@sideway/formula/-/formula-3.0.0.tgz", + "integrity": "sha512-vHe7wZ4NOXVfkoRb8T5otiENVlT7a3IAiw7H5M2+GO+9CDgcVUUsX1zalAztCmwyOr2RUTGJdgB+ZvSVqmdHmg==" }, "@sideway/pinpoint": { "version": "2.0.0", @@ -5415,9 +3306,9 @@ "integrity": "sha512-ebDJ9b0e702Yr7pWgB0jzm+CX4Srzz8RcXtLJDJB+BSccqMa36uyH/zUsSYao5+BD1ytv3k3rPYCq4mAE1hsXA==" }, "@types/aws-lambda": { - "version": "8.10.106", - "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.106.tgz", - "integrity": "sha512-yzgMaql7aW1by1XuhKhovuhLyK/1A60lapFXDXXBeHmoyRGQFO2T8lkL3g8hAhHoW5PEvqPJFWPd8jvXiRnxeQ==", + "version": "8.10.113", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.113.tgz", + "integrity": 
"sha512-JHxKRi7Nz2Bjqp0CKllm9Fl5GZwRfMF+lilSfl12ATyvBx0cRM09ztBVgebeJ8wrjJggHxxjSU2/U+SvSG+K1A==", "dev": true }, "@types/babel__core": { @@ -5461,6 +3352,15 @@ "@babel/types": "^7.3.0" } }, + "@types/bcrypt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-5.0.0.tgz", + "integrity": "sha512-agtcFKaruL8TmcvqbndlqHPSJgsolhf/qPWchFlgnW1gECTN/nKbFcoFnvKAQRFfKbh+BO6A3SWdJu9t+xF3Lw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -5472,15 +3372,15 @@ } }, "@types/cacheable-request": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", - "integrity": "sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", + "integrity": "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", "dev": true, "requires": { "@types/http-cache-semantics": "*", - "@types/keyv": "*", + "@types/keyv": "^3.1.4", "@types/node": "*", - "@types/responselike": "*" + "@types/responselike": "^1.0.0" } }, "@types/connect": { @@ -5492,6 +3392,24 @@ "@types/node": "*" } }, + "@types/convict": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@types/convict/-/convict-6.1.1.tgz", + "integrity": "sha512-R+JLaTvhsD06p4jyjUDtbd5xMtZTRE3c0iI+lrFWZogSVEjgTWPYwvJPVf+t92E+yrlbXa4X4Eg9ro6gPdUt4w==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, + "@types/cookie-parser": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/@types/cookie-parser/-/cookie-parser-1.4.3.tgz", + "integrity": "sha512-CqSKwFwefj4PzZ5n/iwad/bow2hTCh0FlNAeWLtQM3JA/NX/iYagIpWG2cf1bQKQ2c9gU2log5VUCrn7LDOs0w==", + "dev": true, + "requires": { + "@types/express": "*" + } + }, "@types/cookiejar": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/@types/cookiejar/-/cookiejar-2.1.2.tgz", @@ -5506,6 +3424,14 @@ "@types/ms": "*" } }, + "@types/dompurify": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-2.4.0.tgz", + "integrity": "sha512-IDBwO5IZhrKvHFUl+clZxgf3hn2b/lU6H1KaBShPkQyGJUQ0xwebezIPSuiyGwfz1UzJWQl4M7BDxtHtCCPlTg==", + "requires": { + "@types/trusted-types": "*" + } + }, "@types/express": { "version": "4.17.13", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", @@ -5529,6 +3455,15 @@ "@types/range-parser": "*" } }, + "@types/express-session": { + "version": "1.17.5", + "resolved": "https://registry.npmjs.org/@types/express-session/-/express-session-1.17.5.tgz", + "integrity": "sha512-l0DhkvNVfyUPEEis8fcwbd46VptfA/jmMwHfob2TfDMf3HyPLiB9mKD71LXhz5TMUobODXPD27zXSwtFQLHm+w==", + "dev": true, + "requires": { + "@types/express": "*" + } + }, "@types/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", @@ -5609,6 +3544,12 @@ "@types/node": "*" } }, + "@types/lodash": { + "version": "4.14.186", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.186.tgz", + "integrity": "sha512-eHcVlLXP0c2FlMPm56ITode2AgLMSa6aJ05JTTbYbI+7EMkCEE5qk2E41d5g2lCVTqRe0GnnRFurmlCsDODrPw==", + "dev": true + }, "@types/long": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz", @@ -5716,6 +3657,11 @@ 
"@types/superagent": "*" } }, + "@types/trusted-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.2.tgz", + "integrity": "sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg==" + }, "@types/validator": { "version": "13.7.1", "resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.7.1.tgz", @@ -6000,8 +3946,7 @@ "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, "accepts": { "version": "1.3.8", @@ -6010,26 +3955,6 @@ "requires": { "mime-types": "~2.1.34", "negotiator": "0.6.3" - }, - "dependencies": { - "mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" - }, - "mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "requires": { - "mime-db": "1.52.0" - } - }, - "negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" - } } }, "acorn": { @@ -6166,8 +4091,7 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "3.2.1", @@ -6191,8 +4115,7 @@ "aproba": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", - "dev": true + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, "archive-type": { "version": "4.0.0", @@ -6215,7 +4138,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "dev": true, "requires": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" @@ -6237,7 +4159,7 @@ "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" }, "array-includes": { "version": "3.1.4", @@ -6426,22 +4348,50 @@ "resolved": "https://registry.npmjs.org/asap/-/asap-2.0.6.tgz", "integrity": "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA==" }, + "asl-path-validator": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/asl-path-validator/-/asl-path-validator-0.11.0.tgz", + "integrity": 
"sha512-2kfFkqNCXInc7d8hbUoXn/XpK5fFr3//0nh4jfcZWav0VR4zo2bYVlRCwOuNKJID9yM4vIo7dMb4n0fnWrc/Xw==", + "dev": true, + "requires": { + "jsonpath-plus": "^7.0.0" + } + }, "asl-validator": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/asl-validator/-/asl-validator-1.10.0.tgz", - "integrity": "sha512-N7/ouOzYaEJqUNf82NarTs6Cg8sZMnHIDVGULjhzw7GLyXgoXkQUZrhvHXCyucvkU4FfeHmzY3TynEjl0/UN3Q==", + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/asl-validator/-/asl-validator-3.5.1.tgz", + "integrity": "sha512-k6ear3q0HxUHCOsJiuW/H2BAGWMIIx+H0iduCGwwS9WQTYhIn6Dz9bbKRW4M4EKvGD6HrEAJRLkKk/Ft22xnsg==", "dev": true, "requires": { - "ajv": "^6.12.6", + "ajv": "^8.11.0", + "asl-path-validator": "^0.11.0", "commander": "^5.1.0", - "jsonpath": "^1.1.0" + "jsonpath-plus": "^7.0.0" }, "dependencies": { + "ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + } + }, "commander": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/commander/-/commander-5.1.0.tgz", "integrity": "sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==", "dev": true + }, + "json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", + "dev": true } } }, @@ -6556,13 +4506,22 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/commander/-/commander-3.0.2.tgz", "integrity": "sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==" + }, + "js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } } } }, "aws-sdk": { - "version": "2.1313.0", - "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1313.0.tgz", - "integrity": "sha512-8GMdtV2Uch3HL2c6+P3lNZFTcg/fqq9L3EWYRLb6ljCZvWKTssjdkjSJFDyTReNgeiKV224YRPYQbKpOEz4flQ==", + "version": "2.1337.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1337.0.tgz", + "integrity": "sha512-Nq6EnocdYP7LScN3kbG1ZKQr5iMEIkObW4ikSjkSz9SofPNDlOr4EFfYNaGzuF91M93YZ++7JIGMTb05ubjHlA==", "requires": { "buffer": "4.9.2", "events": "1.1.1", @@ -6736,6 +4695,22 @@ "safe-buffer": "5.1.2" } }, + "bcrypt": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.0.tgz", + "integrity": "sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==", + "requires": { + "@mapbox/node-pre-gyp": "^1.0.10", + "node-addon-api": "^5.0.0" + }, + "dependencies": { + "node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" + } + } + }, "before-after-hook": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", @@ -6748,37 +4723,24 @@ "dev": true }, 
"bl": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", - "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", "dev": true, "requires": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" }, "dependencies": { - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "dev": true, "requires": { - "safe-buffer": "~5.1.0" + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" } } } @@ -6789,59 +4751,56 @@ "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" }, "body-parser": { - "version": "1.19.2", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.2.tgz", - "integrity": "sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw==", + "version": "1.20.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz", + "integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==", "requires": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", - "depd": "~1.1.2", - "http-errors": "1.8.1", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", "iconv-lite": "0.4.24", - "on-finished": "~2.3.0", - "qs": "6.9.7", - "raw-body": "2.4.3", - "type-is": "~1.6.18" + "on-finished": "2.4.1", + "qs": "6.11.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" }, "dependencies": { - "bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" }, "http-errors": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", - "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": 
"sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "requires": { - "depd": "~1.1.2", + "depd": "2.0.0", "inherits": "2.0.4", "setprototypeof": "1.2.0", - "statuses": ">= 1.5.0 < 2", + "statuses": "2.0.1", "toidentifier": "1.0.1" } }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "raw-body": { - "version": "2.4.3", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.3.tgz", - "integrity": "sha512-UlTNLIcu0uzb4D2f4WltY6cVjLi+/jEN4lgEUj3E04tpMDpUlkBo/eSn6zou9hum2VMNpCCUone0O0WeJim07g==", + "on-finished": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", + "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", "requires": { - "bytes": "3.1.2", - "http-errors": "1.8.1", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" + "ee-first": "1.1.1" } }, - "toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" + "qs": { + "version": "6.11.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", + "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "requires": { + "side-channel": "^1.0.4" + } } } }, @@ -6965,9 +4924,9 @@ "dev": true }, "bytes": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", - "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", + "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" }, "cacache": { "version": "15.3.0", @@ -7044,6 +5003,16 @@ "requires": { "pump": "^3.0.0" } + }, + "pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } } } }, @@ -7161,24 +5130,12 @@ } }, "cli-cursor": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", - "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", "dev": true, "requires": { - "restore-cursor": "^2.0.0" - }, - "dependencies": { - "restore-cursor": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", - "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", - "dev": true, - "requires": { - "onetime": "^2.0.0", - "signal-exit": "^3.0.2" - } - } + "restore-cursor": "^3.1.0" } }, "cli-progress-footer": { @@ -7194,14 +5151,6 @@ "process-utils": "^4.0.0", "timers-ext": "^0.1.7", "type": "^2.6.0" - }, - "dependencies": { - "mute-stream": { - "version": "0.0.8", - "resolved": 
"https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", - "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", - "dev": true - } } }, "cli-spinners": { @@ -7421,6 +5370,100 @@ } } }, + "cloudmersive-virus-api-client": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/cloudmersive-virus-api-client/-/cloudmersive-virus-api-client-1.2.7.tgz", + "integrity": "sha512-t3eHfZtnCZdtgsvwZ0WaGWTgRDsiFdwtId5sK8ppYcM8ntj/pHh7qZAwADQsuaAxdOcpxwMygKBBNJTflylMIQ==", + "requires": { + "superagent": "3.7.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "superagent": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.7.0.tgz", + "integrity": "sha512-/8trxO6NbLx4YXb7IeeFTSmsQ35pQBiTBsLNvobZx7qBzBeHYvKCyIIhW2gNcWbLzYxPAjdgFbiepd8ypwC0Gw==", + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.1.1", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.0.5" + }, + "dependencies": { + "cookiejar": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==" + }, + "formidable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/formidable/-/formidable-1.2.2.tgz", + "integrity": "sha512-V8gLm+41I/8kguQ4/o1D3RIHRmhYFG4pnNyonvua+40rqcEmT4+V71yaZ3B457xbbgCsCfjSPi65u/W6vK1U5Q==" + }, + "mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" + }, + "qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.9.7.tgz", + "integrity": 
"sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw==" + } + } + } + } + }, "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -7467,8 +5510,7 @@ "color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" }, "colorette": { "version": "1.2.2", @@ -7562,6 +5604,15 @@ "universalify": "^2.0.0" } }, + "minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, "universalify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.0.tgz", @@ -7581,9 +5632,9 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, "concurrently": { - "version": "7.4.0", - "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-7.4.0.tgz", - "integrity": "sha512-M6AfrueDt/GEna/Vg9BqQ+93yuvzkSKmoTixnwEJkH0LlcGrRC2eCmjeG1tLLHIYfpYJABokqSGyMcXjm96AFA==", + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/concurrently/-/concurrently-7.6.0.tgz", + "integrity": "sha512-BKtRgvcJGeZ4XttiDiNcFiRlxoAeZOseqUvyYRUp/Vtd+9p1ULmeoSqGsDA+2ivdeDFpqrJvGvmI+StKfKl5hw==", "dev": true, "requires": { "chalk": "^4.1.0", @@ -7597,12 +5648,6 @@ "yargs": "^17.3.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -7633,6 +5678,17 @@ } } }, + "cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + } + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -7654,41 +5710,15 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true - }, "rxjs": { - "version": "7.5.7", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.5.7.tgz", - "integrity": "sha512-z9MzKh/UcOqB3i20H6rtrlaE/CgjLOvheWK/9ILrbhROGTweAi1BaFsTT9FbwZi5Trr1qNRs+MXkhmR06awzQA==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.0.tgz", + "integrity": "sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==", "dev": true, "requires": { "tslib": "^2.1.0" } }, - "string-width": { - "version": "4.2.3", - "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, - "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "supports-color": { "version": "8.1.1", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", @@ -7699,24 +5729,35 @@ } }, "tslib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.0.tgz", - "integrity": "sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==", "dev": true }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, "yargs": { - "version": "17.5.1", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.5.1.tgz", - "integrity": "sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA==", + "version": "17.7.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.1.tgz", + "integrity": "sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw==", "dev": true, "requires": { - "cliui": "^7.0.2", + "cliui": "^8.0.1", "escalade": "^3.1.1", "get-caller-file": "^2.0.5", "require-directory": "^2.1.1", "string-width": "^4.2.3", "y18n": "^5.0.5", - "yargs-parser": "^21.0.0" + "yargs-parser": "^21.1.1" } }, "yargs-parser": { @@ -7727,17 +5768,39 @@ } } }, - "confusing-browser-globals": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz", - "integrity": "sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA==", - "dev": true - }, + "confusing-browser-globals": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/confusing-browser-globals/-/confusing-browser-globals-1.0.10.tgz", + "integrity": "sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA==", + "dev": true + }, + "connect-session-sequelize": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/connect-session-sequelize/-/connect-session-sequelize-7.1.5.tgz", + "integrity": "sha512-oCHmWlCqhWoq6GGJ9z9PWLk0mrEsIfKlLE4WHUBjRIcSRgUh4OEIDfGY7rfBnlD6clRwwYQfK6+ks0fuTFf6WA==", + "requires": { + "debug": "^4.1.1" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": 
"https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" }, "content-disposition": { "version": "0.5.4", @@ -7755,9 +5818,9 @@ } }, "content-type": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", - "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==" + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", + "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==" }, "conventional-commit-types": { "version": "3.0.0", @@ -7774,6 +5837,15 @@ "safe-buffer": "~5.1.1" } }, + "convict": { + "version": "6.2.4", + "resolved": "https://registry.npmjs.org/convict/-/convict-6.2.4.tgz", + "integrity": "sha512-qN60BAwdMVdofckX7AlohVJ2x9UvjTNoKVXCL2LxFk1l7757EJqf1nySdMkPQer0bt8kQ5lQiyZ9/2NvrFBuwQ==", + "requires": { + "lodash.clonedeep": "^4.5.0", + "yargs-parser": "^20.2.7" + } + }, "cookie": { "version": "0.4.2", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", @@ -8007,9 +6079,9 @@ "integrity": "sha512-V0pfhfr8suzyPGOx3nmq4aHqabehUZn6Ch9kyFpV79TGDTWFmHqUqXdabR7QHqxzrYolF4+tVmJhUG4OURg5dQ==" }, "decode-uri-component": { - "version": "0.2.2", - "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", - "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", + "integrity": "sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og==" }, "decompress": { "version": "4.2.1", @@ -8071,6 +6143,16 @@ "tar-stream": "^1.5.2" }, "dependencies": { + "bl": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", + "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", + "dev": true, + "requires": { + "readable-stream": "^2.3.5", + "safe-buffer": "^5.1.1" + } + }, "file-type": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", @@ -8082,6 +6164,45 @@ "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", "dev": true + }, + "readable-stream": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", + "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "tar-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", + "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", + "dev": true, + "requires": { + "bl": "^1.0.0", + "buffer-alloc": "^1.2.0", + "end-of-stream": "^1.0.0", + "fs-constants": "^1.0.0", + "readable-stream": "^2.3.0", + "to-buffer": "^1.1.1", + "xtend": "^4.0.0" + } } } }, @@ -8228,6 +6349,7 @@ "version": "1.1.3", "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, "requires": { "object-keys": "^1.0.12" } @@ -8324,8 +6446,7 @@ "delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", - "dev": true + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" }, "depd": { "version": "1.1.2", @@ -8338,14 +6459,14 @@ "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" }, "destroy": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", - "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" }, "detect-file": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz", - "integrity": "sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=", + "integrity": "sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q==", "dev": true }, "detect-indent": { @@ -8428,16 +6549,6 @@ } } }, - "dompurify": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.3.1.tgz", - "integrity": "sha512-xGWt+NHAQS+4tpgbOAI08yxW0Pr256Gu/FNE2frZVTbgrBUn8M7tz7/ktS/LZ2MHeGqz6topj0/xY+y8R5FBFw==" - }, - "dotenv": { - "version": "16.0.3", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", - "integrity": "sha512-7GO6HghkA5fYG9TYnNxi14/7K9f5occMlp3zXAuSxn7CKCxt9xbNWG7yF8hTCSUchlfWSe3uLmlPfigevRItzQ==" - }, "dottie": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/dottie/-/dottie-2.0.3.tgz", @@ -8490,8 +6601,7 @@ "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "enabled": { "version": "2.0.0", @@ -8501,7 +6611,7 @@ "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=" + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" }, "encoding": { "version": "0.1.13", @@ -8594,6 +6704,7 @@ "version": "1.2.1", "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", "integrity": 
"sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, "requires": { "is-callable": "^1.1.4", "is-date-object": "^1.0.1", @@ -8667,7 +6778,7 @@ "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" }, "escape-string-regexp": { "version": "1.0.5", @@ -9313,7 +7424,7 @@ "etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=" + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" }, "event-emitter": { "version": "0.3.5", @@ -9328,7 +7439,7 @@ "events": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", - "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==" }, "execa": { "version": "5.1.1", @@ -9373,7 +7484,7 @@ "expand-tilde": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", - "integrity": "sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=", + "integrity": "sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw==", "dev": true, "requires": { "homedir-polyfill": "^1.0.1" @@ -9433,6 +7544,71 @@ "vary": "~1.1.2" }, "dependencies": { + "body-parser": { + "version": "1.19.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.2.tgz", + "integrity": "sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw==", + "requires": { + "bytes": "3.1.2", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.8.1", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.9.7", + "raw-body": "2.4.3", + "type-is": "~1.6.18" + } + }, + "raw-body": { + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.3.tgz", + "integrity": "sha512-UlTNLIcu0uzb4D2f4WltY6cVjLi+/jEN4lgEUj3E04tpMDpUlkBo/eSn6zou9hum2VMNpCCUone0O0WeJim07g==", + "requires": { + "bytes": "3.1.2", + "http-errors": "1.8.1", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + } + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + }, + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" + } + } + }, + "express-rate-limit": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-6.7.0.tgz", + "integrity": "sha512-vhwIdRoqcYB/72TK3tRZI+0ttS8Ytrk24GfmsxDXK9o9IhHNO5bXRiXQSExPQ4GbaE5tvIS7j1SGrxsuWs+sGA==" + }, + "express-session": { + "version": "1.17.3", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.17.3.tgz", + "integrity": "sha512-4+otWXlShYlG1Ma+2Jnn+xgKUZTMJ5QD3YvfilX3AcocOAbIkVylSWEklzALe/+Pu4qV6TYBj5GwOBFfdKqLBw==", + "requires": { + "cookie": "0.4.2", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.0.2", + 
"parseurl": "~1.3.3", + "safe-buffer": "5.2.1", + "uid-safe": "~2.1.5" + }, + "dependencies": { + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -9468,6 +7644,11 @@ "sort-keys-length": "^1.0.0" } }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, "external-editor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", @@ -9641,16 +7822,23 @@ "parseurl": "~1.3.3", "statuses": "~1.5.0", "unpipe": "~1.0.0" + }, + "dependencies": { + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" + } } }, "find-node-modules": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/find-node-modules/-/find-node-modules-2.1.2.tgz", - "integrity": "sha512-x+3P4mbtRPlSiVE1Qco0Z4YLU8WFiFcuWTf3m75OV9Uzcfs2Bg+O9N+r/K0AnmINBW06KpfqKwYJbFlFq4qNug==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/find-node-modules/-/find-node-modules-2.1.3.tgz", + "integrity": "sha512-UC2I2+nx1ZuOBclWVNdcnbDR5dlrOdVb7xNjmT/lHE+LsgztWks3dG7boJ37yTS/venXw84B/mAW9uHVoC5QRg==", "dev": true, "requires": { "findup-sync": "^4.0.0", - "merge": "^2.1.0" + "merge": "^2.1.1" } }, "find-requires": { @@ -9752,9 +7940,9 @@ }, "dependencies": { "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.11.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz", + "integrity": "sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==", "requires": { "side-channel": "^1.0.4" } @@ -9769,7 +7957,7 @@ "fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" }, "fs-constants": { "version": "1.0.0", @@ -9835,9 +8023,9 @@ }, "dependencies": { "ignore": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", - "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "version": "5.2.4", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.4.tgz", + "integrity": "sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==", "dev": true } } @@ -9886,201 +8074,16 @@ "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" }, - "function.prototype.name": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz", - "integrity": 
"sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "es-abstract": "^1.19.0", - "functions-have-names": "^1.2.2" - }, - "dependencies": { - "es-abstract": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", - "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", - "requires": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.6", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - } - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "object-inspect": { - "version": "1.12.2", - "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - }, - "dependencies": { - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - } - } - }, - "string.prototype.trimend": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", - "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - }, - "dependencies": { - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - } - } - }, - "string.prototype.trimstart": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", - "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - }, - "dependencies": { - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - } - } - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - } - } - }, "functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", "dev": true }, - "functions-have-names": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz", - "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==" - }, "gauge": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "dev": true, 
"requires": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", @@ -10106,13 +8109,13 @@ "dev": true }, "get-intrinsic": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", - "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz", + "integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==", "requires": { "function-bind": "^1.1.1", "has": "^1.0.3", - "has-symbols": "^1.0.1" + "has-symbols": "^1.0.3" } }, "get-own-enumerable-property-symbols": { @@ -10137,6 +8140,7 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "dev": true, "requires": { "call-bind": "^1.0.2", "get-intrinsic": "^1.1.1" @@ -10221,7 +8225,7 @@ "global-prefix": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", - "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", + "integrity": "sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg==", "dev": true, "requires": { "expand-tilde": "^2.0.2", @@ -10259,6 +8263,14 @@ } } }, + "gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "requires": { + "get-intrinsic": "^1.1.3" + } + }, "got": { "version": "11.8.6", "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", @@ -10279,9 +8291,9 @@ } }, "graceful-fs": { - "version": "4.2.10", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", - "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==" + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "handlebars": { "version": "4.7.7", @@ -10316,18 +8328,10 @@ "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", "dev": true }, - "has-property-descriptors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz", - "integrity": "sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ==", - "requires": { - "get-intrinsic": "^1.1.1" - } - }, "has-symbols": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz", - "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==" + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" }, "has-tostringtag": { "version": "1.0.0", @@ -10340,8 +8344,7 @@ "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", - "dev": true + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "helmet": { "version": "4.6.0", @@ -10383,31 +8386,21 @@ 
"dev": true }, "http-errors": { - "version": "1.8.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.0.tgz", - "integrity": "sha512-4I8r0C5JDhT5VkvI47QktDW75rNlGVsUf/8hzjCC/wkWI/jdTRmBb9aI7erSG82r1bjKY3F6k28WnsVxB1C73A==", + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", + "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", "requires": { "depd": "~1.1.2", "inherits": "2.0.4", "setprototypeof": "1.2.0", "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" + "toidentifier": "1.0.1" }, "dependencies": { - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, "statuses": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" } } }, @@ -10447,18 +8440,18 @@ } }, "https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz", + "integrity": "sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA==", "requires": { "agent-base": "6", "debug": "4" }, "dependencies": { "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } @@ -10581,9 +8574,9 @@ } }, "inherits": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", - "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "ini": { "version": "1.3.8", @@ -10633,15 +8626,6 @@ "supports-color": "^7.1.0" } }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -10704,6 +8688,7 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.3.tgz", "integrity": 
"sha512-O0DB1JC/sPyZl7cIo78n5dR7eUSwwpYPiXRhTzNxZVAMUuB8vlnRFyLxdrVToks6XPLVnFfbzaVd5WLjhgg+vA==", + "dev": true, "requires": { "get-intrinsic": "^1.1.0", "has": "^1.0.3", @@ -10744,7 +8729,8 @@ "is-bigint": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.1.tgz", - "integrity": "sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg==" + "integrity": "sha512-J0ELF4yHFxHy0cmSxZuheDOz2luOdVvqjwmEcj8H/L1JHeuEDSDbeRP+Dk9kFVk5RTFzbucJ2Kb9F7ixY2QaCg==", + "dev": true }, "is-binary-path": { "version": "2.1.0", @@ -10759,6 +8745,7 @@ "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.1.0.tgz", "integrity": "sha512-a7Uprx8UtD+HWdyYwnD1+ExtTgqQtD2k/1yJgtXP6wnMm8byhkoTZRl+95LLThpzNZJ5aEvi46cdH+ayMFRwmA==", + "dev": true, "requires": { "call-bind": "^1.0.0" } @@ -10780,7 +8767,8 @@ "is-date-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true }, "is-docker": { "version": "2.2.1", @@ -10797,8 +8785,7 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "is-generator-fn": { "version": "2.1.0", @@ -10857,7 +8844,8 @@ "is-number-object": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz", - "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==" + "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==", + "dev": true }, "is-obj": { "version": "1.0.1", @@ -10941,7 +8929,8 @@ "is-string": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==" + "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", + "dev": true }, "is-svg": { "version": "4.4.0", @@ -10965,164 +8954,21 @@ "version": "1.0.3", "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, "requires": { "has-symbols": "^1.0.1" } }, "is-typed-array": { - "version": "1.1.9", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.9.tgz", - "integrity": "sha512-kfrlnTTn8pZkfpJMUgYD7YZ3qzeJgWUn8XfVYBARc4wnmNOmLbmuuaAs3q5fvB0UJOn6yHAKaGTPM7d6ezoD/A==", + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.10.tgz", + "integrity": "sha512-PJqgEHiWZvMpaFZ3uTc8kHPM4+4ADTlDniuQL7cU/UDA0Ql7F70yGfHph3cLNe+c9toaigv+DFzTJKhc2CtO6A==", "requires": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.20.0", "for-each": "^0.3.3", + "gopd": "^1.0.1", "has-tostringtag": "^1.0.0" - }, - 
"dependencies": { - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - }, - "es-abstract": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", - "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", - "requires": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - "has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.6", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - } - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "requires": { - 
"has-tostringtag": "^1.0.0" - } - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - } - }, - "string.prototype.trimend": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", - "integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "string.prototype.trimstart": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", - "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - } } }, "is-typedarray": { @@ -11153,6 +8999,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, "requires": { "call-bind": "^1.0.2" } @@ -11194,6 +9041,288 @@ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", "dev": true }, + "isomorphic-dompurify": { + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/isomorphic-dompurify/-/isomorphic-dompurify-0.24.0.tgz", + "integrity": "sha512-YePhHHQAVsU1CYkL3gKQmga+fTAh66eWg+RVQOVFRNfzoLkd+gFhFY5S+g80f8b0v2JBMYg+npqdZI1vOxTOBQ==", + "requires": { + "@types/dompurify": "^2.3.4", + "dompurify": "^2.4.1", + "jsdom": "^20.0.1" + }, + "dependencies": { + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==" + }, + "abab": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==" + }, + "acorn": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", + "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==" + }, + "acorn-globals": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", + "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", + "requires": { + 
"acorn": "^8.1.0", + "acorn-walk": "^8.0.2" + } + }, + "acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" + }, + "cssom": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz", + "integrity": "sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==" + }, + "data-urls": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-3.0.2.tgz", + "integrity": "sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==", + "requires": { + "abab": "^2.0.6", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0" + } + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==" + }, + "domexception": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", + "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", + "requires": { + "webidl-conversions": "^7.0.0" + } + }, + "dompurify": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.1.tgz", + "integrity": "sha512-ewwFzHzrrneRjxzmK6oVz/rZn9VWspGFRDb4/rRtIsM1n36t9AKma/ye8syCpcw+XJ25kOK/hOG7t1j2I2yBqA==" + }, + "entities": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", + "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==" + }, + "escodegen": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", + "requires": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "html-encoding-sniffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", + "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "requires": { + "whatwg-encoding": "^2.0.0" + } + }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": 
"sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "requires": { + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + }, + "jsdom": { + "version": "20.0.3", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-20.0.3.tgz", + "integrity": "sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==", + "requires": { + "abab": "^2.0.6", + "acorn": "^8.8.1", + "acorn-globals": "^7.0.0", + "cssom": "^0.5.0", + "cssstyle": "^2.3.0", + "data-urls": "^3.0.2", + "decimal.js": "^10.4.2", + "domexception": "^4.0.0", + "escodegen": "^2.0.0", + "form-data": "^4.0.0", + "html-encoding-sniffer": "^3.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.1", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.2", + "parse5": "^7.1.1", + "saxes": "^6.0.0", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.1.2", + "w3c-xmlserializer": "^4.0.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^2.0.0", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0", + "ws": "^8.11.0", + "xml-name-validator": "^4.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "nwsapi": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.2.tgz", + "integrity": "sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==" + }, + "parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "requires": { + "entities": "^4.4.0" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "saxes": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "requires": { + "xmlchars": "^2.2.0" + } + }, + "tough-cookie": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.2.tgz", + "integrity": "sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==", + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + } + }, + "tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "requires": { + 
"punycode": "^2.1.1" + } + }, + "universalify": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==" + }, + "w3c-xmlserializer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", + "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "requires": { + "xml-name-validator": "^4.0.0" + } + }, + "webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" + }, + "whatwg-encoding": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", + "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "requires": { + "iconv-lite": "0.6.3" + } + }, + "whatwg-mimetype": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", + "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==" + }, + "whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "requires": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + } + }, + "ws": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz", + "integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==" + }, + "xml-name-validator": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", + "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==" + } + } + }, "isomorphic-git": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.18.3.tgz", @@ -12657,9 +10786,10 @@ "dev": true }, "js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, "requires": { "argparse": "^1.0.7", "esprima": "^4.0.0" @@ -12760,12 +10890,12 @@ "dev": true }, "json5": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz", - "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.0.tgz", + "integrity": "sha512-f+8cldu7X/y7RAJurMEJmdoKXGB/X550w2Nr3tTbezL6RwEE/iMcm+tZnXeoZtKuOq6ft8+CqzEkrIgx1fPoQA==", "dev": true, "requires": { - "minimist": "^1.2.0" + "minimist": "^1.2.5" } }, "jsonfile": { @@ -12776,24 +10906,11 @@ "graceful-fs": "^4.1.6" } }, - "jsonpath": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz", - "integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==", - "dev": true, - "requires": { - "esprima": "1.2.2", - "static-eval": "2.0.2", - "underscore": "1.12.1" - }, - "dependencies": { - "esprima": { - "version": "1.2.2", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", - "integrity": "sha512-+JpPZam9w5DuJ3Q67SqsMGtiHKENSMRVoxvArfJZK01/BfLEObtZ6orJa/MtoGNR/rfMgp5837T41PAmTwAv/A==", - "dev": true - } - } + "jsonpath-plus": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-7.2.0.tgz", + "integrity": "sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==", + "dev": true }, "jsonwebtoken": { "version": "8.5.1", @@ -12838,16 +10955,10 @@ "safe-buffer": "^5.0.1" } }, - "jwt-decode": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", - "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", - "dev": true - }, "keyv": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.0.tgz", - "integrity": "sha512-2YvuMsA+jnFGtBareKqgANOEKe1mk3HKiXu2fRmAfyxG0MJAywNhi5ttWA3PMjl4NmpyjZNbFifR2vNjW1znfA==", + "version": "4.5.2", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.2.tgz", + "integrity": "sha512-5MHbFaKn8cNSmVW7BYnijeAVlE4cYA/SVkifVgrh7yotnfhKmjuXpDKjrABLnT0SfHWV21P8ow07OGfRrNDg8g==", "dev": true, "requires": { "json-buffer": "3.0.1" @@ -13006,6 +11117,16 @@ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true }, + "micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "requires": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + } + }, "mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -13183,8 +11304,7 @@ "lodash.clonedeep": { "version": "4.5.0", "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", - "dev": true + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" }, "lodash.find": { "version": "4.6.0", @@ -13311,12 +11431,6 @@ "type": "^2.5.0" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "has-flag": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", @@ -13515,7 +11629,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dev": true, "requires": { "semver": "^6.0.0" }, @@ -13523,8 +11636,7 @@ "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true + "integrity": 
"sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, @@ -13618,7 +11730,7 @@ "media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=" + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" }, "memoizee": { "version": "0.4.15", @@ -13645,7 +11757,7 @@ "merge-descriptors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" }, "merge-stream": { "version": "2.0.0", @@ -13665,13 +11777,13 @@ "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=" }, "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", "dev": true, "requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" + "braces": "^3.0.1", + "picomatch": "^2.2.3" } }, "mime": { @@ -13680,16 +11792,16 @@ "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" }, "mime-db": { - "version": "1.40.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", - "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" }, "mime-types": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", - "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", "requires": { - "mime-db": "1.40.0" + "mime-db": "1.52.0" } }, "mimic-fn": { @@ -13704,9 +11816,9 @@ "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==" }, "minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", "requires": { "brace-expansion": "^1.1.7" } @@ -13896,9 +12008,7 @@ "negotiator": { "version": "0.6.3", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", - "dev": true, - "optional": true + "integrity": 
"sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" }, "neo-async": { "version": "2.6.2", @@ -13922,6 +12032,11 @@ "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", "dev": true }, + "nocache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/nocache/-/nocache-3.0.4.tgz", + "integrity": "sha512-WDD0bdg9mbq6F4mRxEYcPWwfA1vxd0mrvKOyxI7Xj/atfRHVeutzuWByG//jfm4uPzp0y4Kj051EORCBSQMycw==" + }, "node-addon-api": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", @@ -13939,17 +12054,17 @@ "tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=" + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=" + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "requires": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -14077,7 +12192,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dev": true, "requires": { "abbrev": "1" } @@ -14107,7 +12221,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "dev": true, "requires": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", @@ -14133,7 +12246,8 @@ "object-keys": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true }, "object.assign": { "version": "4.1.2", @@ -14273,20 +12387,12 @@ } }, "onetime": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", - "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, "requires": { - "mimic-fn": "^1.0.0" - }, - "dependencies": { - "mimic-fn": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", - "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", - "dev": true - } + "mimic-fn": "^2.1.0" } }, "opentracing": { @@ -14324,12 +12430,6 @@ "wcwidth": "^1.0.1" }, "dependencies": { - "ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": 
"sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true - }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -14339,27 +12439,6 @@ "color-convert": "^2.0.1" } }, - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -14370,15 +12449,6 @@ "supports-color": "^7.1.0" } }, - "cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", - "dev": true, - "requires": { - "restore-cursor": "^3.1.0" - } - }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -14400,46 +12470,6 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true - }, - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - }, - "restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", - "dev": true, - "requires": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - } - }, - "strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, - "requires": { - "ansi-regex": "^5.0.1" - } - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -14611,7 +12641,7 @@ "parse-passwd": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", - "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=", + "integrity": "sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==", "dev": true }, "parse5": { @@ -14741,9 +12771,9 @@ "dev": true }, 
"picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", + "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", "dev": true }, "pify": { @@ -14987,20 +13017,10 @@ "resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz", "integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==" }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", - "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==" }, "qs": { "version": "6.9.7", @@ -15021,7 +13041,12 @@ "querystring": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", - "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==" + }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" }, "queue-microtask": { "version": "1.2.3", @@ -15035,43 +13060,43 @@ "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", "dev": true }, + "random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==" + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" }, "raw-body": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", - "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz", + "integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==", "requires": { - "bytes": "3.1.0", - "http-errors": "1.7.2", + "bytes": "3.1.2", + "http-errors": "2.0.0", "iconv-lite": "0.4.24", "unpipe": "1.0.0" }, "dependencies": { + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, "http-errors": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", - "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", + "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", "requires": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.1", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.0" + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" } - }, - "setprototypeof": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", - "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" - }, - "statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=" } } }, @@ -15119,16 +13144,6 @@ "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", "dev": true }, - "regexp.prototype.flags": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", - "integrity": "sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.3", - "functions-have-names": "^1.2.2" - } - }, "regexpp": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.1.0.tgz", @@ -15147,6 +13162,11 @@ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "dev": true }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" + }, "resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", @@ -15175,7 +13195,7 @@ "resolve-dir": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", - "integrity": "sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=", + "integrity": "sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg==", "dev": true, "requires": { "expand-tilde": "^2.0.0", @@ -15221,17 +13241,6 @@ "requires": { "onetime": "^5.1.0", "signal-exit": "^3.0.2" - }, - "dependencies": { - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - } } }, "retry": { @@ -15287,42 +13296,6 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, - "safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", - "requires": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", - "is-regex": "^1.1.4" - }, - "dependencies": { - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": 
"sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - } - } - }, "safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -15331,7 +13304,7 @@ "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", - "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==" }, "saxes": { "version": "5.0.1", @@ -15389,39 +13362,27 @@ "statuses": "~1.5.0" }, "dependencies": { - "http-errors": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.8.1.tgz", - "integrity": "sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==", - "requires": { - "depd": "~1.1.2", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": ">= 1.5.0 < 2", - "toidentifier": "1.0.1" - } - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha512-3NdhDuEXnfun/z7x9GOElY49LoqVHoGScmOKwmxhsS8N5Y+Z8KyPPDnaSzqWgYt/ji4mqwfTS34Htrk0zPIXVg==" }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, - "toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" + "statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" } } }, "sequelize": { - "version": "6.29.0", - "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.29.0.tgz", - "integrity": "sha512-m8Wi90rs3NZP9coXE52c7PL4Q078nwYZXqt1IxPvgki7nOFn0p/F0eKsYDBXCPw9G8/BCEa6zZNk0DQUAT4ypA==", + "version": "6.29.3", + "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.29.3.tgz", + "integrity": "sha512-iLbrN//Eh18zXIlNEUNQx7lk5R+SF39m+66bnrT3x8WB8sbxMH2hF4vw8RIa9ZzB1+c94rclMv/i8fngXmb/4A==", "requires": { "@types/debug": "^4.1.7", "@types/validator": "^13.7.1", @@ -15537,19 +13498,60 @@ } }, "serverless-step-functions": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/serverless-step-functions/-/serverless-step-functions-3.10.0.tgz", - "integrity": 
"sha512-Zv4Gi68+g8vIsdPKK85LbF5fCBpxoZgfwM5taMB8Cb8L0LNn2sejpPZj6vRQHFmCaizmD57pPQVGCHhrB2BdcQ==", + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/serverless-step-functions/-/serverless-step-functions-3.13.0.tgz", + "integrity": "sha512-lJOedjdKShJW3bemwhvTUAMqKu/uWJYFNKEtBxgOsw/BuWxCFvL6esKY+WA3QR7jeHtBknI5U/SStI4j3a+x+w==", "dev": true, "requires": { "@hapi/joi": "^15.0.2", "@serverless/utils": "^6.7.0", - "asl-validator": "^1.9.8", + "asl-validator": "^3.1.0", "bluebird": "^3.4.0", "chalk": "^4.1.2", "lodash": "^4.17.11" }, "dependencies": { + "@serverless/utils": { + "version": "6.10.0", + "resolved": "https://registry.npmjs.org/@serverless/utils/-/utils-6.10.0.tgz", + "integrity": "sha512-1ScVcT8UUzOsOXZpY6Z/VypyZFVX5/2nmAuttD6bYLVEtavl6w+l33LFQbGLuMIRyV/6ZgaeZeyrszOOs4A2+g==", + "dev": true, + "requires": { + "archive-type": "^4.0.0", + "chalk": "^4.1.2", + "ci-info": "^3.8.0", + "cli-progress-footer": "^2.3.2", + "content-disposition": "^0.5.4", + "d": "^1.0.1", + "decompress": "^4.2.1", + "event-emitter": "^0.3.5", + "ext": "^1.7.0", + "ext-name": "^5.0.0", + "file-type": "^16.5.4", + "filenamify": "^4.3.0", + "get-stream": "^6.0.1", + "got": "^11.8.6", + "inquirer": "^8.2.5", + "js-yaml": "^4.1.0", + "jwt-decode": "^3.1.2", + "lodash": "^4.17.21", + "log": "^6.3.1", + "log-node": "^8.0.3", + "make-dir": "^3.1.0", + "memoizee": "^0.4.15", + "ms": "^2.1.3", + "ncjsm": "^4.3.2", + "node-fetch": "^2.6.9", + "open": "^8.4.2", + "p-event": "^4.2.0", + "supports-color": "^8.1.1", + "timers-ext": "^0.1.7", + "type": "^2.7.2", + "uni-global": "^1.0.0", + "uuid": "^8.3.2", + "write-file-atomic": "^4.0.2" + } + }, "ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -15559,44 +13561,158 @@ "color-convert": "^2.0.1" } }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", "dev": true, "requires": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": 
true + }, + "inquirer": { + "version": "8.2.5", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-8.2.5.tgz", + "integrity": "sha512-QAgPDQMEgrDssk1XiwwHoOGYF9BAbUcc1+j+FhEvaOt8/cKRqyLn0U5qA6F74fGhTMGxf92pOvPBeh29jQJDTQ==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^4.1.1", + "cli-cursor": "^3.1.0", + "cli-width": "^3.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.21", + "mute-stream": "0.0.8", + "ora": "^5.4.1", + "run-async": "^2.4.0", + "rxjs": "^7.5.5", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0", + "through": "^2.3.6", + "wrap-ansi": "^7.0.0" + } + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "jwt-decode": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/jwt-decode/-/jwt-decode-3.1.2.tgz", + "integrity": "sha512-UfpWE/VZn0iP50d8cz9NrZLM9lSWhcJ+0Gt/nm4by88UL+J1SiKN8/5dkjMmbEzwL2CAe+67GsegCbIKtbp75A==", + "dev": true + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dev": true, + "requires": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + } + }, + "rxjs": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.0.tgz", + "integrity": "sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg==", + "dev": true, + "requires": { + "tslib": "^2.1.0" } }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", "dev": true, "requires": { - "color-name": "~1.1.4" + "has-flag": "^4.0.0" } }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "tslib": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", + "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==", "dev": true }, - "has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", "dev": true }, - "supports-color": { - "version": "7.2.0", - "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", "dev": true, "requires": { - "has-flag": "^4.0.0" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "requires": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" } } } @@ -15616,8 +13732,7 @@ "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" }, "setprototypeof": { "version": "1.2.0", @@ -15649,9 +13764,9 @@ "dev": true }, "shell-quote": { - "version": "1.7.3", - "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.3.tgz", - "integrity": "sha512-Vpfqwm4EnqGdlsBFNmHhxhElJYrdfcxPThu+ryKS5J8L/fhAwLazFZtq+S+TWZ9ANj2piSQLGj6NQg+lKPmxrw==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.8.0.tgz", + "integrity": "sha512-QHsz8GgQIGKlRi24yFc6a6lN69Idnx634w49ay6+jA5yFh7a1UY+4Rp6HPx/L/1zcEDPEij8cIsiqR6bQsE5VQ==", "dev": true }, "side-channel": { @@ -15667,8 +13782,7 @@ "signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, "simple-concat": { "version": "1.0.1", @@ -15881,9 +13995,9 @@ } }, "sqlite3": { - "version": "5.1.5", - "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-5.1.5.tgz", - "integrity": "sha512-7sP16i4wI+yKnGOO2q2ijze7EjQ9US+Vw7DYYwxfFtqNZDGgBcEw0oeDaDvUTq66uJOzVd/z6MkIg+c9erSJKg==", + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/sqlite3/-/sqlite3-5.1.6.tgz", + "integrity": "sha512-olYkWoKFVNSSSQNvxVUfjiVbz3YtBwTJj+mfV5zpHmqW3sELx2Cf4QCdirMelhM5Zh+KDVaKgQHqCxrqiWHybw==", "dev": true, "requires": { "@mapbox/node-pre-gyp": "^1.0.0", @@ -15924,19 +14038,10 @@ } } }, - "static-eval": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz", - "integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==", - "dev": true, - "requires": { - "escodegen": "^1.8.1" - } - }, "statuses": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", - "integrity": "sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==" + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" }, "stepfunctions-localhost": { "version": "0.2.0", @@ -15963,6 +14068,15 @@ "integrity": 
"sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", "dev": true }, + "cli-cursor": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz", + "integrity": "sha512-8lgKz8LmCRYZZQDpRyT2m5rKJ08TnU4tR9FFFW2rxpxR1FzWi4PQ/NfyODchAatHaUgnSPVcx/R5w6NuTBzFiw==", + "dev": true, + "requires": { + "restore-cursor": "^2.0.0" + } + }, "fs-minipass": { "version": "1.2.7", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.7.tgz", @@ -15981,6 +14095,12 @@ "chalk": "^2.0.1" } }, + "mimic-fn": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz", + "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ==", + "dev": true + }, "minipass": { "version": "2.9.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.9.0.tgz", @@ -16009,6 +14129,15 @@ "minimist": "^1.2.6" } }, + "onetime": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz", + "integrity": "sha512-oyyPpiMaKARvvcgip+JV+7zci5L8D1W9RZIz2l1o08AM3pfspitVWnPt3mzHcBPp12oYMTy0pqrFs/C+m3EwsQ==", + "dev": true, + "requires": { + "mimic-fn": "^1.0.0" + } + }, "ora": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/ora/-/ora-3.4.0.tgz", @@ -16023,6 +14152,16 @@ "wcwidth": "^1.0.1" } }, + "restore-cursor": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz", + "integrity": "sha512-6IzJLuGi4+R14vwagDHX+JrXmPVtPpn4mffDJ1UdR7/Edm87fl6yi8mMBIVvFtJaNTUvjughmW4hwLhRG7gC1Q==", + "dev": true, + "requires": { + "onetime": "^2.0.0", + "signal-exit": "^3.0.2" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -16096,7 +14235,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -16153,7 +14291,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -16208,21 +14345,13 @@ "peek-readable": "^4.1.0" } }, - "superagent": { - "version": "8.0.9", - "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.0.9.tgz", - "integrity": "sha512-4C7Bh5pyHTvU33KpZgwrNKh/VQnvgtCSqPRfJAUdmrtSYePVzVg4E4OzsrbkhJj9O7SO6Bnv75K/F8XVZT8YHA==", + "supertest": { + "version": "6.3.3", + "resolved": "https://registry.npmjs.org/supertest/-/supertest-6.3.3.tgz", + "integrity": "sha512-EMCG6G8gDu5qEqRQ3JjjPs6+FYT1a7Hv5ApHvtSghmOFJYtsU5S+pSb6Y2EUeCEY3CmEL3mmQ8YWlPOzQomabA==", "requires": { - "component-emitter": "^1.3.0", - "cookiejar": "^2.1.4", - "debug": "^4.3.4", - "fast-safe-stringify": "^2.1.1", - "form-data": "^4.0.0", - "formidable": "^2.1.2", "methods": "^1.1.2", - "mime": "2.6.0", - "qs": "^6.11.0", - "semver": "^7.3.8" + "superagent": "^8.0.5" }, "dependencies": { "debug": { @@ -16262,9 +14391,9 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "version": "6.11.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.1.tgz", + "integrity": "sha512-0wsrzgTz/kAVIeuxSjnpGC56rzYtr6JT/2BwEvMaPhFIoYa1aGO8LbzuU1R0uUYQkLpWBTOj0l/CLAJB64J6nQ==", "requires": { "side-channel": "^1.0.4" } @@ -16277,6 +14406,23 @@ "lru-cache": "^6.0.0" } }, + "superagent": { + "version": "8.0.9", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-8.0.9.tgz", + "integrity": "sha512-4C7Bh5pyHTvU33KpZgwrNKh/VQnvgtCSqPRfJAUdmrtSYePVzVg4E4OzsrbkhJj9O7SO6Bnv75K/F8XVZT8YHA==", + "requires": { + "component-emitter": "^1.3.0", + "cookiejar": "^2.1.4", + "debug": "^4.3.4", + "fast-safe-stringify": "^2.1.1", + "form-data": "^4.0.0", + "formidable": "^2.1.2", + "methods": "^1.1.2", + "mime": "2.6.0", + "qs": "^6.11.0", + "semver": "^7.3.8" + } + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -16284,15 +14430,6 @@ } } }, - "supertest": { - "version": "6.3.3", - "resolved": "https://registry.npmjs.org/supertest/-/supertest-6.3.3.tgz", - "integrity": "sha512-EMCG6G8gDu5qEqRQ3JjjPs6+FYT1a7Hv5ApHvtSghmOFJYtsU5S+pSb6Y2EUeCEY3CmEL3mmQ8YWlPOzQomabA==", - "requires": { - "methods": "^1.1.2", - "superagent": "^8.0.5" - } - }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -16407,23 +14544,18 @@ } }, "tar": { - "version": "6.1.13", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.13.tgz", - "integrity": "sha512-jdIBIN6LTIe2jqzay/2vtYLlBHa3JF42ot3h1dW8Q0PaAG4v8rm0cvpVePtau5C6OKXGGcgO9q2AMNSWxiLqKw==", + "version": "6.1.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", + "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", "requires": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", - "minipass": "^4.0.0", + "minipass": "^3.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" }, "dependencies": { - "minipass": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-4.0.3.tgz", - "integrity": "sha512-OW2r4sQ0sI+z5ckEt5c1Tri4xTgZwYDxpE54eqWlQloQRoWtXjqt9udJ5Z4dSv7wK+nfFI7FRXyCpBSft+gpFw==" - }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -16431,47 +14563,6 @@ } } }, - "tar-stream": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", - "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", - "dev": true, - "requires": { - "bl": "^1.0.0", - "buffer-alloc": "^1.2.0", - "end-of-stream": "^1.0.0", - "fs-constants": "^1.0.0", - "readable-stream": "^2.3.0", - "to-buffer": "^1.1.1", - "xtend": "^4.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "2.3.7", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", - "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", - "dev": true, - "requires": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "string_decoder": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", - "integrity": 
"sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", - "dev": true, - "requires": { - "safe-buffer": "~5.1.0" - } - } - } - }, "tcp-port-used": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/tcp-port-used/-/tcp-port-used-1.0.2.tgz", @@ -16611,9 +14702,9 @@ } }, "toidentifier": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", - "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", + "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, "token-types": { "version": "4.2.0", @@ -16639,7 +14730,7 @@ "toposort-class": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toposort-class/-/toposort-class-1.0.1.tgz", - "integrity": "sha1-f/0feMi+KMO6Rc1OGj9e4ZO9mYg=" + "integrity": "sha512-OsLcGGbYF3rMjPUf8oKktyvCiUxSbqMMS39m33MAjLTC1DVIH6x3WSt63/M77ihI09+Sdfk1AXvfhCEeUmC7mg==" }, "tough-cookie": { "version": "4.0.0", @@ -16709,12 +14800,6 @@ "yargs-parser": "20.x" }, "dependencies": { - "json5": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", - "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", - "dev": true - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -16854,6 +14939,15 @@ "strip-bom": "^3.0.0" }, "dependencies": { + "json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "requires": { + "minimist": "^1.2.0" + } + }, "strip-bom": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", @@ -16937,6 +15031,14 @@ "dev": true, "optional": true }, + "uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "requires": { + "random-bytes": "~1.0.0" + } + }, "umzug": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/umzug/-/umzug-3.0.0.tgz", @@ -17005,12 +15107,6 @@ } } }, - "underscore": { - "version": "1.12.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", - "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==", - "dev": true - }, "uni-global": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/uni-global/-/uni-global-1.0.0.tgz", @@ -17053,7 +15149,7 @@ "unpipe": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=" + "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" }, "uri-js": { "version": "4.4.1", @@ -17075,22 +15171,30 @@ "url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", - "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", "requires": { "punycode": "1.3.2", "querystring": "0.2.0" } }, + "url-parse": 
{ + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "util": { - "version": "0.12.4", - "resolved": "https://registry.npmjs.org/util/-/util-0.12.4.tgz", - "integrity": "sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw==", + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", "requires": { "inherits": "^2.0.3", "is-arguments": "^1.0.4", "is-generator-function": "^1.0.7", "is-typed-array": "^1.1.3", - "safe-buffer": "^5.1.2", "which-typed-array": "^1.1.2" } }, @@ -17102,12 +15206,12 @@ "utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" + "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" }, "uuid": { - "version": "3.4.0", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", - "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==" + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", + "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" }, "v8-compile-cache": { "version": "2.3.0", @@ -17264,6 +15368,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz", "integrity": "sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==", + "dev": true, "requires": { "is-bigint": "^1.0.1", "is-boolean-object": "^1.1.0", @@ -17273,167 +15378,22 @@ } }, "which-typed-array": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.8.tgz", - "integrity": "sha512-Jn4e5PItbcAHyLoRDwvPj1ypu27DJbtdYXUa5zsinrUx77Uvfb0cXwwnGMTn7cjUfhhqgVQnVJCwF+7cgU7tpw==", + "version": "1.1.9", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.9.tgz", + "integrity": "sha512-w9c4xkx6mPidwp7180ckYWfMmvxpjlZuIudNtDf4N/tTAUB8VJbX25qZoAsrtGuYNnGw3pa0AXgbGKRB8/EceA==", "requires": { "available-typed-arrays": "^1.0.5", "call-bind": "^1.0.2", - "es-abstract": "^1.20.0", "for-each": "^0.3.3", + "gopd": "^1.0.1", "has-tostringtag": "^1.0.0", - "is-typed-array": "^1.1.9" - }, - "dependencies": { - "define-properties": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.4.tgz", - "integrity": "sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA==", - "requires": { - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" - } - }, - "es-abstract": { - "version": "1.20.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.20.3.tgz", - "integrity": "sha512-AyrnaKVpMzljIdwjzrj+LxGmj8ik2LckwXacHqrJJ/jxz6dDDBcZ7I7nlHM0FvEW8MfbWJwOd+yT2XzYW49Frw==", - "requires": { - "call-bind": "^1.0.2", - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "function.prototype.name": "^1.1.5", - "get-intrinsic": "^1.1.3", - "get-symbol-description": "^1.0.0", - "has": "^1.0.3", - 
"has-property-descriptors": "^1.0.0", - "has-symbols": "^1.0.3", - "internal-slot": "^1.0.3", - "is-callable": "^1.2.6", - "is-negative-zero": "^2.0.2", - "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", - "is-string": "^1.0.7", - "is-weakref": "^1.0.2", - "object-inspect": "^1.12.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.4.3", - "safe-regex-test": "^1.0.0", - "string.prototype.trimend": "^1.0.5", - "string.prototype.trimstart": "^1.0.5", - "unbox-primitive": "^1.0.2" - } - }, - "get-intrinsic": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.3.tgz", - "integrity": "sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A==", - "requires": { - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.3" - } - }, - "has-bigints": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.2.tgz", - "integrity": "sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==" - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "is-callable": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", - "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==" - }, - "is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==" - }, - "is-regex": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", - "integrity": "sha512-kvRdxDsxZjhzUX07ZnLydzS1TU/TJlTUHHY4YLL87e37oUA49DfkLqgy+VjFocowy29cKvcSiu+kIv728jTTVg==", - "requires": { - "call-bind": "^1.0.2", - "has-tostringtag": "^1.0.0" - } - }, - "is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", - "requires": { - "call-bind": "^1.0.2" - } - }, - "is-string": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.7.tgz", - "integrity": "sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==", - "requires": { - "has-tostringtag": "^1.0.0" - } - }, - "object-inspect": { - "version": "1.12.2", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.2.tgz", - "integrity": "sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ==" - }, - "object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "has-symbols": "^1.0.3", - "object-keys": "^1.1.1" - } - }, - "string.prototype.trimend": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz", - 
"integrity": "sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "string.prototype.trimstart": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz", - "integrity": "sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg==", - "requires": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", - "es-abstract": "^1.19.5" - } - }, - "unbox-primitive": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", - "integrity": "sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw==", - "requires": { - "call-bind": "^1.0.2", - "has-bigints": "^1.0.2", - "has-symbols": "^1.0.3", - "which-boxed-primitive": "^1.0.2" - } - } + "is-typed-array": "^1.1.10" } }, "wide-align": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dev": true, "requires": { "string-width": "^1.0.2 || 2 || 3 || 4" } @@ -17674,7 +15634,7 @@ "xmlbuilder": { "version": "9.0.7", "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", - "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=" + "integrity": "sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==" }, "xmlchars": { "version": "2.2.0", @@ -17759,8 +15719,7 @@ "yargs-parser": { "version": "20.2.9", "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==" }, "yauzl": { "version": "2.10.0", diff --git a/package.json b/package.json index bb08e02b7..ef11d6825 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "isomercms", - "version": "0.17.0", + "version": "0.18.0", "private": true, "scripts": { "build": "tsc -p tsconfig.build.json", @@ -8,7 +8,7 @@ "dev:services": "docker compose up -d", "dev:server": "source .env && ts-node-dev --respawn src/server.js", "dev": "npm run dev:services && npm run dev:server", - "test": "source .env.test && jest", + "test": "source .env.test && jest --runInBand", "release": "npm version $npm_config_isomer_update && git push --tags", "lint": "npx eslint .", "lint-fix": "eslint --ignore-path .gitignore . 
--fix", @@ -17,6 +17,7 @@ "prepare": "husky install", "version": "auto-changelog -p && git add CHANGELOG.md", "db:migrate": "source .env && npx sequelize-cli db:migrate", + "db:migrate:undo": "source .env && npx sequelize-cli db:migrate:undo", "jump:staging": "source .ssh/.env.staging && ssh -L 5433:$DB_HOST:5432 $SSH_USER@$SSH_HOST -i .ssh/isomercms-staging-bastion.pem", "db:migrate:staging": "source .ssh/.env.staging && npx sequelize-cli db:migrate", "jump:prod": "source .ssh/.env.prod && ssh -L 5433:$DB_HOST:5432 $SSH_USER@$SSH_HOST -i .ssh/isomercms-production-bastion.pem", @@ -33,21 +34,26 @@ "aws-sdk": "^2.946.0", "axios": "^0.21.3", "base-64": "^0.1.0", + "bcrypt": "^5.1.0", "bluebird": "^3.7.2", "body-parser": "^1.19.2", + "cloudmersive-virus-api-client": "^1.2.7", + "connect-session-sequelize": "^7.1.5", + "convict": "^6.2.4", "cookie-parser": "~1.4.5", "cors": "^2.8.5", "crypto-js": "^4.1.1", "dd-trace": "^2.9.1", "debug": "~2.6.9", - "dompurify": "^2.3.1", - "dotenv": "^16.0.1", "exponential-backoff": "^3.1.0", "express": "~4.17.3", + "express-rate-limit": "^6.7.0", + "express-session": "^1.17.3", "file-type": "^16.5.4", "helmet": "^4.6.0", "http-errors": "~1.8.0", "is-svg": "^4.4.0", + "isomorphic-dompurify": "^0.24.0", "isomorphic-git": "^1.18.2", "joi": "^17.4.0", "js-base64": "^2.6.4", @@ -58,6 +64,7 @@ "moment-timezone": "^0.5.35", "morgan": "~1.10.0", "neverthrow": "^4.3.1", + "nocache": "^3.0.4", "otplib": "^12.0.1", "pg": "^8.6.0", "pg-connection-string": "^2.5.0", @@ -85,8 +92,13 @@ "@swc/helpers": "^0.3.8", "@tsconfig/recommended": "^1.0.1", "@types/aws-lambda": "^8.10.106", + "@types/bcrypt": "^5.0.0", + "@types/convict": "^6.1.1", + "@types/cookie-parser": "^1.4.3", "@types/express": "^4.17.13", + "@types/express-session": "^1.17.5", "@types/jest": "^27.4.1", + "@types/lodash": "^4.14.186", "@types/node": "^17.0.21", "@types/supertest": "^2.0.11", "@types/validator": "^13.7.1", @@ -127,6 +139,7 @@ "@root": "src", "@classes": "src/classes", "@errors": "src/errors", + "@config": "src/config", "@logger": "src/logger", "@middleware": "src/middleware", "@routes": "src/routes", diff --git a/src/__mocks__/axios.ts b/src/__mocks__/axios.ts index 4f63804a7..e6c7a7f22 100644 --- a/src/__mocks__/axios.ts +++ b/src/__mocks__/axios.ts @@ -1,6 +1,3 @@ import mockAxios from "jest-mock-axios" -mockAxios.interceptors.request.use(jest.fn()) -mockAxios.interceptors.response.use(jest.fn()) - export default mockAxios diff --git a/src/bootstrap/index.ts b/src/bootstrap/index.ts index 0a1d8553f..7b17e8bd7 100644 --- a/src/bootstrap/index.ts +++ b/src/bootstrap/index.ts @@ -3,28 +3,12 @@ import http from "http" import createDebug from "debug" import { Express } from "express" +import { config } from "@config/config" + import logger from "@logger/logger" const debug = createDebug("isomercms:server") - -/** - * Normalize a port into a number, string, or false. - */ -const normalizePort = (val: string) => { - const port = parseInt(val, 10) - - if (Number.isNaN(port)) { - // named pipe - return val - } - - if (port >= 0) { - // port number - return port - } - - return false -} +const PORT = config.get("port") /** * Create an event listener for HTTP server "error" event. 
@@ -80,18 +64,17 @@ const createListener = ( } const bootstrapServer = (app: Express) => { - const port = normalizePort(process.env.PORT || "8081") - app.set("port", port) + app.set("port", PORT) // Create HTTP server const server = http.createServer(app) // create event listeners - const onError = createErrorListener(port) - const onListening = createListener(port, server) + const onError = createErrorListener(PORT) + const onListening = createListener(PORT, server) // Listen on provided port, on all network interfaces. - server.listen(port) + server.listen(PORT) server.on("error", onError) server.on("listening", onListening) } diff --git a/src/classes/Collection.js b/src/classes/Collection.js index 1999f4355..66ea97b43 100644 --- a/src/classes/Collection.js +++ b/src/classes/Collection.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") require("bluebird") require("lodash") @@ -69,20 +72,22 @@ class Collection { } if (ISOMER_TEMPLATE_PROTECTED_DIRS.includes(collectionName)) throw new ConflictError(protectedFolderConflictErrorMsg(collectionName)) - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) await collectionConfig.create(newContent) const nav = new File(this.accessToken, this.siteName) const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) navContentObject.links.push({ title: deslugifyCollectionName(collectionName), collection: collectionName, }) - const newNavContent = Base64.encode(yaml.stringify(navContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(navContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } @@ -118,7 +123,7 @@ class Collection { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.filter( (link) => link.collection !== collectionName @@ -127,7 +132,9 @@ class Collection { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } @@ -191,7 +198,7 @@ class Collection { }, } const newConfigContent = Base64.encode( - yaml.stringify(newConfigContentObject) + sanitizedYamlStringify(newConfigContentObject) ) await collectionConfig.update(newConfigContent, configSha) @@ -200,7 +207,7 @@ class Collection { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.map((link) => { if (link.collection === oldCollectionName) { @@ -217,7 +224,9 @@ class Collection { links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + 
sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } } diff --git a/src/classes/Config.js b/src/classes/Config.js index b1d7de383..1d33c52cf 100644 --- a/src/classes/Config.js +++ b/src/classes/Config.js @@ -1,5 +1,6 @@ +import { config } from "@config/config" + const _ = require("lodash") -const yaml = require("yaml") const { ConflictError, @@ -8,6 +9,10 @@ const { const { NotFoundError } = require("@errors/NotFoundError") const validateStatus = require("@utils/axios-utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const { genericGitHubAxiosInstance: axios, @@ -15,8 +20,8 @@ const { // Import error -const { GITHUB_ORG_NAME } = process.env -const { BRANCH_REF } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") +const BRANCH_REF = config.get("github.branchRef") class Config { constructor(accessToken, siteName) { @@ -121,7 +126,7 @@ class CollectionConfig extends Config { async read() { const { content, sha } = await super.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) return { content: contentObject, sha } } @@ -144,7 +149,7 @@ class CollectionConfig extends Config { } } content.collections[collectionName].order.splice(newIndex, 0, item) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await this.update(newContent, sha) } @@ -154,7 +159,7 @@ class CollectionConfig extends Config { const { content, sha } = await this.read() const index = content.collections[collectionName].order.indexOf(item) content.collections[collectionName].order.splice(index, 1) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await this.update(newContent, sha) return { index, item } @@ -166,7 +171,7 @@ class CollectionConfig extends Config { const index = content.collections[collectionName].order.indexOf(oldItem) content.collections[collectionName].order.splice(index, 1) content.collections[collectionName].order.splice(index, 0, newItem) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await this.update(newContent, sha) } @@ -179,7 +184,7 @@ class CollectionConfig extends Config { ) const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = filteredOrder - const newContent = Base64.encode(yaml.stringify(newContentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(newContentObject)) await this.update(newContent, sha) } @@ -196,7 +201,7 @@ class CollectionConfig extends Config { ) const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = renamedOrder - const newContent = Base64.encode(yaml.stringify(newContentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(newContentObject)) await this.update(newContent, sha) } diff --git a/src/classes/Directory.js b/src/classes/Directory.js index cf1b80f36..8bc62c723 100644 --- a/src/classes/Directory.js +++ b/src/classes/Directory.js @@ -1,3 +1,5 @@ +import { config } from "@config/config" + const _ = require("lodash") const { BadRequestError } = require("@errors/BadRequestError") @@ -9,8 +11,8 @@ const { genericGitHubAxiosInstance: axios, } = require("@root/services/api/AxiosInstance") -const { GITHUB_ORG_NAME } = process.env -const { 
BRANCH_REF } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") +const BRANCH_REF = config.get("github.branchRef") class RootType { constructor() { diff --git a/src/classes/File.js b/src/classes/File.js index 3d7b7c5a9..b92e89412 100644 --- a/src/classes/File.js +++ b/src/classes/File.js @@ -1,3 +1,5 @@ +import { config } from "@config/config" + const { BaseIsomerError } = require("@errors/BaseError") const { ConflictError, @@ -13,8 +15,8 @@ const { // Import error -const { GITHUB_ORG_NAME } = process.env -const { BRANCH_REF } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") +const BRANCH_REF = config.get("github.branchRef") class File { constructor(accessToken, siteName) { diff --git a/src/classes/GithubSessionData.ts b/src/classes/GithubSessionData.ts new file mode 100644 index 000000000..22b47dd39 --- /dev/null +++ b/src/classes/GithubSessionData.ts @@ -0,0 +1,24 @@ +export interface GithubSessionDataProps { + currentCommitSha: string + treeSha: string +} + +class GithubSessionData { + private currentCommitSha: GithubSessionDataProps["currentCommitSha"] + + private treeSha: GithubSessionDataProps["treeSha"] + + constructor({ currentCommitSha, treeSha }: GithubSessionDataProps) { + this.currentCommitSha = currentCommitSha + this.treeSha = treeSha + } + + getGithubState() { + return { + currentCommitSha: this.currentCommitSha, + treeSha: this.treeSha, + } + } +} + +export default GithubSessionData diff --git a/src/classes/NetlifyToml.js b/src/classes/NetlifyToml.js index dee696fb7..3e1535b33 100644 --- a/src/classes/NetlifyToml.js +++ b/src/classes/NetlifyToml.js @@ -1,3 +1,5 @@ +import { config } from "@config/config" + const { NotFoundError } = require("@errors/NotFoundError") const validateStatus = require("@utils/axios-utils") @@ -8,8 +10,8 @@ const { // Import error -const { GITHUB_BUILD_ORG_NAME } = process.env -const { GITHUB_BUILD_REPO_NAME } = process.env +const GITHUB_BUILD_ORG_NAME = config.get("github.buildOrgName") +const GITHUB_BUILD_REPO_NAME = config.get("github.buildRepo") class NetlifyToml { constructor(accessToken, siteName) { diff --git a/src/classes/Resource.js b/src/classes/Resource.js index 23e4e73e9..d10f846f1 100644 --- a/src/classes/Resource.js +++ b/src/classes/Resource.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const _ = require("lodash") -const yaml = require("yaml") // Import classes const { NotFoundError } = require("@errors/NotFoundError") @@ -18,6 +17,10 @@ const { sendTree, deslugifyCollectionName, } = require("@utils/utils.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") // Constants const RESOURCE_INDEX_PATH = "index.html" @@ -47,7 +50,7 @@ class Resource { layout: "resources-alt", title: deslugifyCollectionName(resourceName), } - const resourceFrontMatter = yaml.stringify(resourceObject) + const resourceFrontMatter = sanitizedYamlStringify(resourceObject) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") return IsomerFile.create( `${RESOURCE_INDEX_PATH}`, @@ -108,9 +111,11 @@ class Resource { IsomerFile.setFileType(resourceType) const { content, sha } = await IsomerFile.read(RESOURCE_INDEX_PATH) const decodedContent = Base64.decode(content) - const resourceFrontMatterObj = yaml.parse(decodedContent.split("---")[1]) + const resourceFrontMatterObj = sanitizedYamlParse( + decodedContent.split("---")[1] + ) resourceFrontMatterObj.title = deslugifyCollectionName(newResourceName) - const resourceFrontMatter = 
yaml.stringify(resourceFrontMatterObj) + const resourceFrontMatter = sanitizedYamlStringify(resourceFrontMatterObj) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") await IsomerFile.update( RESOURCE_INDEX_PATH, diff --git a/src/classes/ResourceRoom.js b/src/classes/ResourceRoom.js index 5eab979d2..95d5d688c 100644 --- a/src/classes/ResourceRoom.js +++ b/src/classes/ResourceRoom.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const _ = require("lodash") -const yaml = require("yaml") // Import Classes const { Config } = require("@classes/Config.js") @@ -13,6 +12,10 @@ const { sendTree, deslugifyCollectionName, } = require("@utils/utils.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") // Constants const RESOURCE_ROOM_INDEX_PATH = "index.html" @@ -27,7 +30,7 @@ class ResourceRoom { async get() { const config = new Config(this.accessToken, this.siteName) const { content } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) return contentObject.resources_name } @@ -35,11 +38,11 @@ class ResourceRoom { async create(resourceRoom) { const config = new Config(this.accessToken, this.siteName) const { content, sha } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) contentObject.resources_name = resourceRoom - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) // Create index file in resourceRoom const IsomerIndexFile = new File(this.accessToken, this.siteName) @@ -49,7 +52,7 @@ class ResourceRoom { layout: "resources", title: deslugifyCollectionName(resourceRoom), } - const resourceRoomFrontMatter = yaml.stringify(resourceRoomObject) + const resourceRoomFrontMatter = sanitizedYamlStringify(resourceRoomObject) const resourceRoomIndexContent = [ "---\n", resourceRoomFrontMatter, @@ -66,13 +69,15 @@ class ResourceRoom { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) navContentObject.links.push({ title: deslugifyCollectionName(resourceRoom), resource_room: true, }) - const newNavContent = Base64.encode(yaml.stringify(navContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(navContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) @@ -83,12 +88,12 @@ class ResourceRoom { // Add resource room to config const config = new Config(this.accessToken, this.siteName) const { content: configContent, sha: configSha } = await config.read() - const contentObject = yaml.parse(Base64.decode(configContent)) + const contentObject = sanitizedYamlParse(Base64.decode(configContent)) // Obtain existing resourceRoomName const resourceRoomName = contentObject.resources_name contentObject.resources_name = newResourceRoom - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) const commitMessage = `Rename resource room from ${resourceRoomName} to ${newResourceRoom}` @@ -99,7 +104,7 @@ class ResourceRoom { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await 
nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.map((link) => { if (link.resource_room === true) { @@ -114,7 +119,9 @@ class ResourceRoom { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) const { currentCommitSha, treeSha } = await getCommitAndTreeSha( @@ -164,9 +171,11 @@ class ResourceRoom { sha: resourceFileSha, } = await IsomerFile.read(RESOURCE_ROOM_INDEX_PATH) const decodedContent = Base64.decode(resourceFileContent) - const resourceFrontMatterObj = yaml.parse(decodedContent.split("---")[1]) + const resourceFrontMatterObj = sanitizedYamlParse( + decodedContent.split("---")[1] + ) resourceFrontMatterObj.title = deslugifyCollectionName(newResourceRoom) - const resourceFrontMatter = yaml.stringify(resourceFrontMatterObj) + const resourceFrontMatter = sanitizedYamlStringify(resourceFrontMatterObj) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") await IsomerFile.update( RESOURCE_ROOM_INDEX_PATH, @@ -181,21 +190,21 @@ class ResourceRoom { // Delete resource in config const config = new Config(this.accessToken, this.siteName) const { content, sha } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) // Obtain resourceRoomName const resourceRoomName = contentObject.resources_name // Delete resourcses_name from Config delete contentObject.resources_name - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) // Delete resource room in nav if it exists const nav = new File(this.accessToken, this.siteName) const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) // Assumption: only a single resource room exists const newNavLinks = navContentObject.links.filter( @@ -205,7 +214,9 @@ class ResourceRoom { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) // Delete all resources and resourcePages diff --git a/src/classes/Settings.js b/src/classes/Settings.js index bee68f202..33de19ae3 100644 --- a/src/classes/Settings.js +++ b/src/classes/Settings.js @@ -1,12 +1,16 @@ const Bluebird = require("bluebird") const { Base64 } = require("js-base64") const _ = require("lodash") -const yaml = require("yaml") // import classes const { Config } = require("@classes/Config.js") const { File, DataType, HomepageType } = require("@classes/File.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") + // Constants const FOOTER_PATH = "footer.yml" const NAVIGATION_PATH = "navigation.yml" @@ -49,7 +53,7 @@ const retrieveSettingsFiles = async ( // homepage requires special extraction as the content is wrapped in front matter if (fileOpKey === "homepage") { const homepageContent = 
Base64.decode(content) - const homepageFrontMatterObj = yaml.parse( + const homepageFrontMatterObj = sanitizedYamlParse( homepageContent.split("---")[1] ) return { type: fileOpKey, content: homepageFrontMatterObj, sha } @@ -57,7 +61,7 @@ const retrieveSettingsFiles = async ( return { type: fileOpKey, - content: yaml.parse(Base64.decode(content)), + content: sanitizedYamlParse(Base64.decode(content)), sha, } } @@ -209,7 +213,9 @@ class Settings { // To-do: use Git Tree to speed up operations if (!_.isEmpty(configSettings)) { - const newConfigContent = Base64.encode(yaml.stringify(configSettingsObj)) + const newConfigContent = Base64.encode( + sanitizedYamlStringify(configSettingsObj) + ) await configResp.update(newConfigContent, config.sha) // Update title and description in homepage as well if it's changed @@ -222,7 +228,7 @@ class Settings { if (hasTitleChanged) homepageContentObj.title = configSettings.title if (hasDescriptionChanged) homepageContentObj.description = configSettings.description - const homepageFrontMatter = yaml.stringify(homepageContentObj) + const homepageFrontMatter = sanitizedYamlStringify(homepageContentObj) const homepageContent = ["---\n", homepageFrontMatter, "---"].join("") const newHomepageContent = Base64.encode(homepageContent) @@ -232,13 +238,15 @@ class Settings { } if (!_.isEmpty(footerSettings)) { - const newFooterContent = Base64.encode(yaml.stringify(footerSettingsObj)) + const newFooterContent = Base64.encode( + sanitizedYamlStringify(footerSettingsObj) + ) await FooterFile.update(FOOTER_PATH, newFooterContent, footer.sha) } if (!_.isEmpty(navigationSettings)) { const newNavigationContent = Base64.encode( - yaml.stringify(navigationSettingsObj) + sanitizedYamlStringify(navigationSettingsObj) ) await NavigationFile.update( NAVIGATION_PATH, diff --git a/src/classes/UserSessionData.ts b/src/classes/UserSessionData.ts new file mode 100644 index 000000000..eb3f74f06 --- /dev/null +++ b/src/classes/UserSessionData.ts @@ -0,0 +1,54 @@ +export interface IsomerUserProps { + isomerUserId: string + email: string +} +export type GithubUserProps = IsomerUserProps & { + githubId: string + accessToken: string +} + +export type SessionDataProps = IsomerUserProps | GithubUserProps + +/** + * Object containing user information retrieved from the isomercms cookie. + * Not to be used as a general context object. 
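+ * GitHub-login users additionally carry a githubId and accessToken; email-login users have neither (see isEmailUser()).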
+ */ +class UserSessionData { + readonly githubId?: GithubUserProps["githubId"] + + readonly accessToken?: GithubUserProps["accessToken"] + + readonly isomerUserId: SessionDataProps["isomerUserId"] + + readonly email: SessionDataProps["email"] + + private isGithubProps( + sessionDataProps: SessionDataProps + ): sessionDataProps is GithubUserProps { + return (sessionDataProps as GithubUserProps).githubId !== undefined + } + + constructor(props: SessionDataProps) { + if (this.isGithubProps(props)) { + this.githubId = props.githubId + this.accessToken = props.accessToken + } + this.isomerUserId = props.isomerUserId + this.email = props.email + } + + isEmailUser() { + return !this.githubId + } + + getGithubParams() { + return { + githubId: this.githubId, + accessToken: this.accessToken, + isomerUserId: this.isomerUserId, + email: this.email, + } + } +} + +export default UserSessionData diff --git a/src/classes/UserWithSiteSessionData.ts b/src/classes/UserWithSiteSessionData.ts new file mode 100644 index 000000000..6e8c0c0fc --- /dev/null +++ b/src/classes/UserWithSiteSessionData.ts @@ -0,0 +1,27 @@ +import UserSessionData, { SessionDataProps } from "./UserSessionData" + +export type UserWithSiteSessionDataProps = SessionDataProps & { + siteName: string +} + +/** + * Object containing user information retrieved from the isomercms cookie, and the site being accessed. + * Not to be used as a general context object. + */ +class UserWithSiteSessionData extends UserSessionData { + readonly siteName: string + + constructor(props: UserWithSiteSessionDataProps) { + super(props) + this.siteName = props.siteName + } + + getGithubParamsWithSite() { + return { + ...super.getGithubParams(), + siteName: this.siteName, + } + } +} + +export default UserWithSiteSessionData diff --git a/src/classes/index.ts b/src/classes/index.ts new file mode 100644 index 000000000..e90bb5545 --- /dev/null +++ b/src/classes/index.ts @@ -0,0 +1,3 @@ +export * from "./UserSessionData" +export * from "./UserWithSiteSessionData" +export * from "./GithubSessionData" diff --git a/src/config/config.ts b/src/config/config.ts new file mode 100644 index 000000000..522fc8f67 --- /dev/null +++ b/src/config/config.ts @@ -0,0 +1,348 @@ +import convict from "convict" + +convict.addFormat({ + name: "required-string", + validate: (val: any) => { + if (!val) throw new Error("value cannot be empty, null or undefined") + if (typeof val !== "string") throw new Error("value must be a string") + }, +}) + +convict.addFormat({ + name: "required-positive-number", + validate: (val: any) => { + if (val === null || val === undefined || val === "") + throw new Error("value cannot be empty, null or undefined") + if (typeof val !== "number") throw new Error("value must be a number") + }, + coerce: (val: string) => { + const coercedVal = Number(val) + if (isNaN(coercedVal)) { + throw new Error( + "value provided is not a positive number. 
please provide a valid positive number" + ) + } + if (coercedVal <= 0) { + throw new Error("value must be more than zero") + } + return coercedVal + }, +}) + +convict.addFormat({ + name: "required-boolean", + validate: (val: any) => { + if (val === null || val === undefined) + throw new Error("value cannot be empty, null or undefined") + if (typeof val !== "boolean") throw new Error("value must be a boolean") + }, + coerce: (val: string) => String(val).toLowerCase() === "true", +}) + +// Define a schema +const config = convict({ + env: { + doc: "The application environment.", + env: "NODE_ENV", + format: ["dev", "test", "prod", "staging", "vapt"], + default: "dev", + }, + port: { + doc: "The port to bind.", + env: "PORT", + format: "required-positive-number", + default: 8081, + }, + gitGuardian: { + doc: "API Key for GitGuardian pre-commit hooks", + env: "GITGUARDIAN_API_KEY", + sensitive: true, + format: String, + default: "", + }, + cloudmersiveKey: { + doc: "API Key for Cloudmersive scanning", + env: "CLOUDMERSIVE_API_KEY", + sensitive: true, + format: "required-string", + default: "", + }, + app: { + frontendUrl: { + doc: "URL of the frontend application", + env: "FRONTEND_URL", + format: "required-string", + default: "", + }, + }, + mutexTableName: { + doc: "Name of the DynamoDB table used for mutexes", + env: "MUTEX_TABLE_NAME", + format: "required-string", + default: "isomer-mutexes", + }, + sites: { + pageCount: { + doc: "Number of pages of repos to retrieve from GitHub API", + env: "ISOMERPAGES_REPO_PAGE_COUNT", + format: "required-positive-number", + default: 10, + }, + }, + auth: { + cookieDomain: { + doc: "Domain to set for auth cookie", + env: "COOKIE_DOMAIN", + format: ["localhost", "cms.isomer.gov.sg", "isomer.gov.sg"], + default: "localhost", + }, + tokenExpiry: { + doc: "Expiry duration for auth token in milliseconds", + env: "AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS", + format: "required-positive-number", + default: 3600000, // 1 hour + }, + jwtSecret: { + doc: "Secret used to sign auth tokens", + env: "JWT_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + encryptionSecret: { + doc: "Secret used to encrypt access GitHub access token", + env: "ENCRYPTION_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + maxNumOtpAttempts: { + doc: "Maximum number of OTP attempts allowed", + env: "MAX_NUM_OTP_ATTEMPTS", + format: "required-positive-number", + default: 5, + }, + otpExpiry: { + doc: "Expiry duration for OTP in milliseconds", + env: "OTP_EXPIRY", + format: "required-positive-number", + default: 900000, + }, + otpSecret: { + doc: "Secret used for OTP generation", + env: "OTP_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + sessionSecret: { + doc: "Secret used for sessions", + env: "SESSION_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + }, + aws: { + amplify: { + region: { + doc: "AWS region", + env: "AWS_REGION", + format: "required-string", + default: "ap-southeast-1", + }, + accountNumber: { + doc: "AWS account number (microservices)", + env: "AWS_ACCOUNT_NUMBER", + sensitive: true, + format: String, + default: "", + }, + accessKeyId: { + doc: "AWS access key ID (microservices)", + env: "AWS_ACCESS_KEY_ID", + sensitive: true, + format: String, + default: "", + }, + secretAccessKey: { + doc: "AWS secret access key (microservices)", + env: "AWS_SECRET_ACCESS_KEY", + sensitive: true, + format: String, + default: "", + }, + }, + sqs: { + incomingQueueUrl: { + 
doc: "URL of the incoming SQS queue", + env: "INCOMING_QUEUE_URL", + format: "required-string", + default: "", + }, + outgoingQueueUrl: { + doc: "URL of the outgoing SQS queue", + env: "OUTGOING_QUEUE_URL", + format: "required-string", + default: "", + }, + }, + }, + github: { + orgName: { + doc: "GitHub organization that owns all site repositories", + env: "GITHUB_ORG_NAME", + format: "required-string", + default: "isomerpages", + }, + buildOrgName: { + doc: "GitHub organization that owns the build repository", + env: "GITHUB_BUILD_ORG_NAME", + format: "required-string", + default: "opengovsg", + }, + buildRepo: { + doc: "Name of the build GitHub repository", + env: "GITHUB_BUILD_REPO_NAME", + format: "required-string", + default: "isomer-build", + }, + clientId: { + doc: "GitHub OAuth app Client ID", + env: "CLIENT_ID", + format: "required-string", + default: "", + }, + clientSecret: { + doc: "GitHub OAuth app Client secret", + env: "CLIENT_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + redirectUri: { + doc: "URL to redirect to after authentication with GitHub", + env: "REDIRECT_URI", + format: "required-string", + default: "", + }, + branchRef: { + doc: "Git branch to use for saving modifications to site", + env: "BRANCH_REF", + format: "required-string", + default: "staging", + }, + systemToken: { + doc: "GitHub access token to create repo", + env: "SYSTEM_GITHUB_TOKEN", + sensitive: true, + format: "required-string", + default: "", + }, + }, + dataDog: { + env: { + doc: "The DataDog environment", + format: ["development", "local", "staging", "production"], + env: "DD_ENV", + default: "local", + }, + service: { + doc: "The DataDog service", + env: "DD_SERVICE", + format: "required-string", + default: "", + }, + tags: { + doc: "The DataDog tags", + env: "DD_TAGS", + format: "required-string", + default: "", + }, + }, + formSg: { + siteCreateFormKey: { + doc: "FormSG API key for site creation form", + env: "SITE_CREATE_FORM_KEY", + sensitive: true, + format: "required-string", + default: "", + }, + }, + postman: { + apiKey: { + doc: "Postman API key", + env: "POSTMAN_API_KEY", + sensitive: true, + format: "required-string", + default: "", + }, + smsCredName: { + doc: "Postman SMS credential name", + env: "POSTMAN_SMS_CRED_NAME", + format: "required-string", + default: "", + }, + }, + cypress: { + e2eTestRepo: { + doc: "Name of the e2e test GitHub repository", + env: "E2E_TEST_REPO", + format: "required-string", + default: "e2e-test-repo", + }, + e2eTestSecret: { + doc: "Secret for e2e tests", + env: "E2E_TEST_SECRET", + sensitive: true, + format: "required-string", + default: "", + }, + e2eTestGithubToken: { + doc: + "GitHub access token for e2e tests. 
Replace with your own token and make sure the github user is in your local database", + env: "E2E_TEST_GH_TOKEN", + sensitive: true, + format: "required-string", + default: "", + }, + }, + database: { + dbUri: { + doc: "Database URI", + env: "DB_URI", + sensitive: true, + format: "required-string", + default: "postgres://isomer:password@localhost:54321/isomercms_test", + }, + dbMinPool: { + doc: "Minimum number of connections in the pool", + env: "DB_MIN_POOL", + format: "required-positive-number", + default: 1, + }, + dbMaxPool: { + doc: "Maximum number of connections in the pool", + env: "DB_MAX_POOL", + format: "required-positive-number", + default: 10, + }, + dbEnableLogging: { + doc: "Enable database logging", + env: "DB_ENABLE_LOGGING", + format: "required-boolean", + default: false, + }, + }, +}) + +// Perform validation +// TODO: remove try-catch after prod deployment is successful to avoid blocking +try { + config.validate({ allowed: "strict" }) +} catch (e: any) { + console.log(`Convict error: ${e}`) +} + +export default config +export { config } diff --git a/src/constants/constants.ts b/src/constants/constants.ts index b3b2601cf..bb26d0d6a 100644 --- a/src/constants/constants.ts +++ b/src/constants/constants.ts @@ -1,3 +1,5 @@ +import { config } from "@config/config" + export enum JobStatus { Ready = "READY", // Ready to run jobs Running = "RUNNING", // A job is running @@ -14,3 +16,42 @@ export enum RedirectionTypes { CNAME = "CNAME", A = "A", } + +export enum CollaboratorRoles { + Admin = "ADMIN", + Contributor = "CONTRIBUTOR", +} + +export enum ReviewRequestStatus { + Approved = "APPROVED", + Open = "OPEN", + Merged = "MERGED", + Closed = "CLOSED", +} + +export const E2E_ISOMER_ID = "-1" +export const E2E_TEST_EMAIL = "test@e2e" +export const E2E_TEST_CONTACT = "12345678" + +export const GH_MAX_REPO_COUNT = 100 +export const ISOMERPAGES_REPO_PAGE_COUNT = config.get("sites.pageCount") +export const ISOMER_GITHUB_ORG_NAME = config.get("github.orgName") +export const ISOMER_ADMIN_REPOS = [ + "isomercms-backend", + "isomercms-frontend", + "isomer-redirection", + "isomerpages-template", + "isomer-conversion-scripts", + "isomer-wysiwyg", + "isomer-slackbot", + "isomer-tooling", + "generate-site", + "travisci-scripts", + "recommender-train", + "editor", + "ci-test", + "infra", + "markdown-helper", +] + +export const INACTIVE_USER_THRESHOLD_DAYS = 60 diff --git a/src/database/config.js b/src/database/config.js index cc451d26c..43ded8222 100644 --- a/src/database/config.js +++ b/src/database/config.js @@ -4,9 +4,13 @@ const { parse } = require("pg-connection-string") // We have to manually parse database URL because sequelize-typescript requires explicit // connection parameters. -const { DB_URI, DB_MIN_POOL, DB_MAX_POOL } = process.env +// Note: We are using process.env here instead of convict's config.get() as sequelize-cli is unable +// to support import of TS files inside JS. Note that validation of these envs will still be +// performed by convict in src/config/config.ts. +const { DB_URI } = process.env +const DB_MIN_POOL = parseInt(process.env.DB_MIN_POOL, 10) +const DB_MAX_POOL = parseInt(process.env.DB_MAX_POOL, 10) -if (!DB_URI) throw new Error("DB_URI is not defined") const parsed = parse(DB_URI) const port = parsed.port ? parseInt(parsed.port, 10) : 5432 @@ -32,7 +36,7 @@ module.exports = { updatedAt: "updated_at", }, pool: { - min: DB_MIN_POOL ? parseInt(DB_MIN_POOL, 10) : 1, - max: DB_MAX_POOL ? 
parseInt(DB_MAX_POOL, 10) : 10, + min: DB_MIN_POOL, + max: DB_MAX_POOL, }, } diff --git a/src/database/migrations/20220726094614-create-isomer-admin.js b/src/database/migrations/20220726094614-create-isomer-admin.js new file mode 100644 index 000000000..c47293c5c --- /dev/null +++ b/src/database/migrations/20220726094614-create-isomer-admin.js @@ -0,0 +1,36 @@ +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.createTable("isomer_admins", { + id: { + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.BIGINT, + }, + user_id: { + allowNull: false, + type: Sequelize.BIGINT, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + down: async (queryInterface) => { + await queryInterface.dropTable("isomer_admins") + }, +} diff --git a/src/database/migrations/20220803091224-change-users-github-allow-null.js b/src/database/migrations/20220803091224-change-users-github-allow-null.js new file mode 100644 index 000000000..3ee65c69f --- /dev/null +++ b/src/database/migrations/20220803091224-change-users-github-allow-null.js @@ -0,0 +1,23 @@ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.changeColumn("users", "github_id", { + allowNull: true, + unique: true, + type: Sequelize.TEXT, + validate: { + notEmpty: true, + }, + }) + }, + + async down(queryInterface, Sequelize) { + await queryInterface.changeColumn("users", "github_id", { + allowNull: false, + unique: true, + type: Sequelize.TEXT, + validate: { + notEmpty: true, + }, + }) + }, +} diff --git a/src/database/migrations/20220811070630-change-role-enum.js b/src/database/migrations/20220811070630-change-role-enum.js new file mode 100644 index 000000000..aee6b357e --- /dev/null +++ b/src/database/migrations/20220811070630-change-role-enum.js @@ -0,0 +1,59 @@ +module.exports = { + async up(queryInterface, Sequelize) { + // Change the role enum values in the site_members table + await queryInterface.sequelize.transaction(async (transaction) => { + // 1. Change column type to TEXT + await queryInterface.changeColumn( + "site_members", // name of Source model + "role", // name of column we're modifying + { + type: Sequelize.TEXT, + }, + { transaction } + ) + // 2. Discard enum type + await queryInterface.sequelize.query( + "drop type enum_site_members_role;", + { transaction } + ) + // 3. Change column type to new enum type (fails if inconsistent with existing data) + await queryInterface.changeColumn( + "site_members", // name of Source model + "role", // name of column we're modifying + { + type: Sequelize.ENUM("ADMIN", "CONTRIBUTOR"), + }, + { transaction } + ) + }) + }, + + async down(queryInterface, Sequelize) { + // Change the role enum values in the site_members table + await queryInterface.sequelize.transaction(async (transaction) => { + // 1. Change column type to TEXT + await queryInterface.changeColumn( + "site_members", // name of Source model + "role", // name of column we're modifying + { + type: Sequelize.TEXT, + }, + { transaction } + ) + // 2. Discard enum type + await queryInterface.sequelize.query( + "drop type enum_site_members_role;", + { transaction } + ) + // 3. 
Change column type to new enum type (fails if inconsistent with existing data) + await queryInterface.changeColumn( + "site_members", // name of Source model + "role", // name of column we're modifying + { + type: Sequelize.ENUM("ADMIN", "USER"), + }, + { transaction } + ) + }) + }, +} diff --git a/src/database/migrations/20220926081632-change-primary-key-site-members.js b/src/database/migrations/20220926081632-change-primary-key-site-members.js new file mode 100644 index 000000000..ebb4222a8 --- /dev/null +++ b/src/database/migrations/20220926081632-change-primary-key-site-members.js @@ -0,0 +1,80 @@ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.sequelize.transaction(async (transaction) => { + Promise.all([ + queryInterface.changeColumn("site_members", "user_id", { + allowNull: false, + primaryKey: false, + type: Sequelize.BIGINT, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + transaction, + }), + queryInterface.changeColumn("site_members", "site_id", { + type: Sequelize.BIGINT, + allowNull: false, + primaryKey: false, + references: { + model: "sites", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + transaction, + }), + queryInterface.addColumn( + "site_members", // name of Source model + "id", // name of column we're adding + { + unique: true, + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.BIGINT, + transaction, + } + ), + ]) + }) + }, + + async down(queryInterface, Sequelize) { + await queryInterface.sequelize.transaction(async (transaction) => { + Promise.all([ + queryInterface.removeColumn( + "site_members", // name of Source Model + "id", // name of column we want to remove + { transaction } + ), + queryInterface.changeColumn("site_members", "user_id", { + allowNull: false, + primaryKey: true, + type: Sequelize.BIGINT, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + transaction, + }), + queryInterface.changeColumn("site_members", "site_id", { + type: Sequelize.BIGINT, + allowNull: false, + primaryKey: true, + references: { + model: "sites", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + transaction, + }), + ]) + }) + }, +} diff --git a/src/database/migrations/20220926081632-create-notifications.js b/src/database/migrations/20220926081632-create-notifications.js new file mode 100644 index 000000000..7ce3b876e --- /dev/null +++ b/src/database/migrations/20220926081632-create-notifications.js @@ -0,0 +1,80 @@ +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.createTable("notifications", { + id: { + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.BIGINT, + }, + site_member_id: { + allowNull: false, + type: Sequelize.BIGINT, + references: { + model: "site_members", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + site_id: { + allowNull: false, + type: Sequelize.BIGINT, + references: { + model: "sites", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + user_id: { + allowNull: false, + type: Sequelize.BIGINT, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + message: { + allowNull: true, + type: Sequelize.STRING, + }, + link: { + allowNull: true, + type: Sequelize.STRING, + }, + source_username: { + allowNull: false, + type: Sequelize.STRING, + }, + type: { + allowNull: false, + type: Sequelize.STRING, + }, + 
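+      // Timestamp when the notification was first read; remains null while unread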
first_read_time: { + allowNull: true, + type: Sequelize.DATE, + }, + priority: { + allowNull: false, + type: Sequelize.BIGINT, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + down: async (queryInterface) => { + await queryInterface.dropTable("notifications") + }, +} diff --git a/src/database/migrations/20221003052424-review-request-creation.js b/src/database/migrations/20221003052424-review-request-creation.js new file mode 100644 index 000000000..541b7deea --- /dev/null +++ b/src/database/migrations/20221003052424-review-request-creation.js @@ -0,0 +1,43 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("review_requests", { + id: { + allowNull: false, + primaryKey: true, + type: Sequelize.BIGINT, + autoIncrement: true, + }, + requestor_id: { + type: Sequelize.BIGINT, + allowNull: false, + references: { + model: "users", + key: "id", + }, + }, + site_id: { + type: Sequelize.BIGINT, + allowNull: false, + references: { + model: "sites", + key: "id", + }, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("review_requests") + }, +} diff --git a/src/database/migrations/20221003123422-review-meta-creation.js b/src/database/migrations/20221003123422-review-meta-creation.js new file mode 100644 index 000000000..3812952de --- /dev/null +++ b/src/database/migrations/20221003123422-review-meta-creation.js @@ -0,0 +1,46 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("review_meta", { + id: { + allowNull: false, + primaryKey: true, + autoIncrement: true, + type: Sequelize.BIGINT, + }, + review_id: { + type: Sequelize.BIGINT, + allowNull: false, + references: { + model: "review_requests", + key: "id", + }, + }, + // The PR number stored by GitHub + pull_request_number: { + type: Sequelize.BIGINT, + allowNull: false, + }, + // The link to view this RR + review_link: { + unique: true, + type: Sequelize.STRING, + allowNull: false, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("review_meta") + }, +} diff --git a/src/database/migrations/20221003130006-reviewer-creation.js b/src/database/migrations/20221003130006-reviewer-creation.js new file mode 100644 index 000000000..a8e33f2ac --- /dev/null +++ b/src/database/migrations/20221003130006-reviewer-creation.js @@ -0,0 +1,43 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + up: async (queryInterface, Sequelize) => { + await queryInterface.createTable("reviewers", { + request_id: { + allowNull: false, + primaryKey: true, + type: Sequelize.BIGINT, + references: { + model: "review_requests", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + reviewer_id: { + type: Sequelize.BIGINT, + allowNull: false, + primaryKey: true, + references: { + model: "users", + key: "id", 
+ }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + down: async (queryInterface) => { + await queryInterface.dropTable("reviewers") + }, +} diff --git a/src/database/migrations/20221007124138-create-review-status.js b/src/database/migrations/20221007124138-create-review-status.js new file mode 100644 index 000000000..d887f57a1 --- /dev/null +++ b/src/database/migrations/20221007124138-create-review-status.js @@ -0,0 +1,30 @@ +module.exports = { + up: async (queryInterface, Sequelize) => + queryInterface.sequelize.transaction(async (t) => { + await queryInterface.addColumn( + "review_requests", // name of Source model + "review_status", // name of column we're adding + { + type: Sequelize.ENUM, + values: ["OPEN", "MERGED", "CLOSED", "APPROVED"], + allowNull: false, + defaultValue: "OPEN", + transaction: t, + } + ) + }), + + down: async (queryInterface, _) => + queryInterface.sequelize.transaction(async (t) => { + await queryInterface.removeColumn( + "review_requests", // name of Source Model + "review_status", // name of column we want to remove + { transaction: t } + ) + // drop created enum + await queryInterface.sequelize.query( + "drop type enum_review_requests_review_status;", + { transaction: t } + ) + }), +} diff --git a/src/database/migrations/20221012064037-create-review-request-views.js b/src/database/migrations/20221012064037-create-review-request-views.js new file mode 100644 index 000000000..18e9c04a7 --- /dev/null +++ b/src/database/migrations/20221012064037-create-review-request-views.js @@ -0,0 +1,59 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("review_request_views", { + review_request_id: { + allowNull: false, + primaryKey: true, + type: Sequelize.BIGINT, + references: { + model: "review_requests", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + site_id: { + type: Sequelize.BIGINT, + allowNull: false, + primaryKey: true, + references: { + model: "sites", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + user_id: { + type: Sequelize.BIGINT, + allowNull: false, + primaryKey: true, + references: { + model: "users", + key: "id", + }, + onUpdate: "CASCADE", + onDelete: "CASCADE", + }, + last_viewed_at: { + type: Sequelize.DATE, + allowNull: true, + defaultValue: null, + }, + created_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + updated_at: { + type: Sequelize.DATE, + allowNull: false, + defaultValue: Sequelize.fn("NOW"), + }, + }) + }, + + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("review_request_views") + }, +} diff --git a/src/database/migrations/20230125033437-add-sessions.js b/src/database/migrations/20230125033437-add-sessions.js new file mode 100644 index 000000000..8d650ccc0 --- /dev/null +++ b/src/database/migrations/20230125033437-add-sessions.js @@ -0,0 +1,28 @@ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("sessions", { + sid: { + primaryKey: true, + type: Sequelize.STRING(36), + }, + expires: { + type: Sequelize.DATE, + }, + data: { + type: Sequelize.TEXT, + }, + created_at: { + allowNull: false, + type: Sequelize.DATE, + }, + updated_at: { + allowNull: false, + type: 
Sequelize.DATE, + }, + }) + }, + + async down(queryInterface) { + await queryInterface.dropTable("sessions") + }, +} diff --git a/src/database/migrations/20230214055456-create-otps.js b/src/database/migrations/20230214055456-create-otps.js new file mode 100644 index 000000000..c5f75c587 --- /dev/null +++ b/src/database/migrations/20230214055456-create-otps.js @@ -0,0 +1,49 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("otps", { + id: { + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.INTEGER, + }, + email: { + allowNull: true, + unique: true, + type: Sequelize.STRING, + }, + mobile_number: { + allowNull: true, + unique: true, + type: Sequelize.STRING, + }, + hashed_otp: { + allowNull: false, + validate: { + notEmpty: true, + }, + type: Sequelize.STRING, + }, + attempts: { + allowNull: false, + type: Sequelize.INTEGER, + }, + expires_at: { + allowNull: false, + type: Sequelize.DATE, + }, + created_at: { + allowNull: false, + type: Sequelize.DATE, + }, + updated_at: { + allowNull: false, + type: Sequelize.DATE, + }, + }) + }, + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("otps") + }, +} diff --git a/src/database/models/IsomerAdmin.ts b/src/database/models/IsomerAdmin.ts new file mode 100644 index 000000000..a6ee22c1b --- /dev/null +++ b/src/database/models/IsomerAdmin.ts @@ -0,0 +1,32 @@ +import { + Column, + CreatedAt, + DataType, + ForeignKey, + Model, + Table, + UpdatedAt, +} from "sequelize-typescript" + +import { User } from "@database/models/User" + +@Table({ tableName: "isomer_admins" }) +export class IsomerAdmin extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @ForeignKey(() => User) + @Column + userId!: number + + @CreatedAt + createdAt!: Date + + @UpdatedAt + updatedAt!: Date +} diff --git a/src/database/models/Notification.ts b/src/database/models/Notification.ts new file mode 100644 index 000000000..500ad404d --- /dev/null +++ b/src/database/models/Notification.ts @@ -0,0 +1,88 @@ +import { + DataType, + Column, + Model, + Table, + CreatedAt, + UpdatedAt, + DeletedAt, + BelongsToMany, + HasOne, + BelongsTo, + ForeignKey, +} from "sequelize-typescript" + +import { Site } from "@database/models/Site" +import { SiteMember } from "@database/models/SiteMember" +import { User } from "@database/models/User" + +@Table({ tableName: "notifications" }) +export class Notification extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @ForeignKey(() => SiteMember) + siteMemberId!: number + + @BelongsTo(() => SiteMember) + siteMember!: SiteMember + + @ForeignKey(() => Site) + siteId!: number + + @BelongsTo(() => Site) + site!: Site + + @ForeignKey(() => User) + userId!: number + + @BelongsTo(() => User) + user!: Site + + @Column({ + allowNull: true, + type: DataType.TEXT, + }) + message!: string + + @Column({ + allowNull: true, + type: DataType.TEXT, + }) + link!: string + + @Column({ + allowNull: true, + type: DataType.TEXT, + }) + sourceUsername!: string + + @Column({ + allowNull: false, + type: DataType.TEXT, + }) + type!: string + + @Column({ + allowNull: true, + type: DataType.DATE, + }) + firstReadTime!: Date | null + + @Column({ + allowNull: false, + type: DataType.BIGINT, + }) + priority!: number + + @CreatedAt + createdAt!: Date + + @UpdatedAt + 
updatedAt!: Date +} diff --git a/src/database/models/Otp.ts b/src/database/models/Otp.ts new file mode 100644 index 000000000..c9324acec --- /dev/null +++ b/src/database/models/Otp.ts @@ -0,0 +1,61 @@ +import { + Column, + CreatedAt, + DataType, + Model, + Table, + UpdatedAt, +} from "sequelize-typescript" + +@Table({ tableName: "otps" }) +export class Otp extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @Column({ + allowNull: true, + unique: true, + type: DataType.TEXT, + }) + email?: string | null + + @Column({ + allowNull: true, + unique: true, + type: DataType.TEXT, + }) + mobileNumber?: string | null + + @Column({ + allowNull: false, + type: DataType.TEXT, + validate: { + notEmpty: true, + }, + }) + hashedOtp!: string + + // tracks number of times user attempts to submit the OTP code and log in + @Column({ + type: DataType.INTEGER, + defaultValue: 0, + }) + attempts!: number + + @Column({ + allowNull: false, + type: DataType.DATE, + }) + expiresAt!: Date + + @CreatedAt + createdAt!: Date + + @UpdatedAt + updatedAt!: Date +} diff --git a/src/database/models/ReviewMeta.ts b/src/database/models/ReviewMeta.ts new file mode 100644 index 000000000..84212e68d --- /dev/null +++ b/src/database/models/ReviewMeta.ts @@ -0,0 +1,44 @@ +import { + DataType, + Column, + Model, + Table, + ForeignKey, + BelongsTo, +} from "sequelize-typescript" + +import { ReviewRequest } from "./ReviewRequest" +import { User } from "./User" + +@Table({ tableName: "review_meta" }) +// eslint-disable-next-line import/prefer-default-export +export class ReviewMeta extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @ForeignKey(() => User) + reviewerId!: number + + @ForeignKey(() => ReviewRequest) + reviewId!: number + + @BelongsTo(() => ReviewRequest) + reviewRequest!: ReviewRequest + + @Column({ + allowNull: false, + type: DataType.BIGINT, + }) + pullRequestNumber!: number + + @Column({ + allowNull: false, + type: DataType.STRING, + }) + reviewLink!: string +} diff --git a/src/database/models/ReviewRequest.ts b/src/database/models/ReviewRequest.ts new file mode 100644 index 000000000..22bd5b737 --- /dev/null +++ b/src/database/models/ReviewRequest.ts @@ -0,0 +1,69 @@ +import { + ForeignKey, + DataType, + Column, + Model, + Table, + BelongsTo, + BelongsToMany, + HasOne, +} from "sequelize-typescript" + +import { Site } from "@database/models/Site" +import { User } from "@database/models/User" +import { ReviewRequestStatus } from "@root/constants" + +import { Reviewer } from "./Reviewers" +import { ReviewMeta } from "./ReviewMeta" + +@Table({ tableName: "review_requests" }) +// eslint-disable-next-line import/prefer-default-export +export class ReviewRequest extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @ForeignKey(() => User) + requestorId!: number + + // NOTE: Because this is a FK to User, + // when User is updated/deleted, + // the corresponding row in ReviewRequest will also be updated. 
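+  // (configured via the onUpdate/onDelete: "CASCADE" options on the association below)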
+ @BelongsTo(() => User, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + }) + requestor!: User + + @ForeignKey(() => Site) + siteId!: number + + // See above comment wrt CASCADE + @BelongsTo(() => Site, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + }) + site!: Site + + @HasOne(() => ReviewMeta) + reviewMeta!: ReviewMeta + + @Column({ + allowNull: false, + defaultValue: "OPEN", + type: DataType.ENUM(...Object.values(ReviewRequestStatus)), + }) + reviewStatus!: ReviewRequestStatus + + @BelongsToMany(() => User, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + through: () => Reviewer, + as: "reviewers", + }) + reviewers!: User[] +} diff --git a/src/database/models/ReviewRequestView.ts b/src/database/models/ReviewRequestView.ts new file mode 100644 index 000000000..43b950b2f --- /dev/null +++ b/src/database/models/ReviewRequestView.ts @@ -0,0 +1,56 @@ +import { + ForeignKey, + DataType, + Column, + Model, + Table, + BelongsTo, + PrimaryKey, +} from "sequelize-typescript" + +import { ReviewRequest } from "@database/models/ReviewRequest" +import { Site } from "@database/models/Site" +import { User } from "@database/models/User" + +@Table({ tableName: "review_request_views" }) +// eslint-disable-next-line import/prefer-default-export +export class ReviewRequestView extends Model { + @ForeignKey(() => ReviewRequest) + @PrimaryKey + @Column + reviewRequestId!: number + + @BelongsTo(() => ReviewRequest, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + }) + reviewRequest!: ReviewRequest + + @ForeignKey(() => Site) + @PrimaryKey + @Column + siteId!: number + + @BelongsTo(() => Site, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + }) + site!: Site + + @ForeignKey(() => User) + @PrimaryKey + @Column + userId!: number + + @BelongsTo(() => User, { + onUpdate: "CASCADE", + onDelete: "CASCADE", + }) + user!: User + + @Column({ + allowNull: true, + type: DataType.DATE, + }) + lastViewedAt!: Date +} diff --git a/src/database/models/Reviewers.ts b/src/database/models/Reviewers.ts new file mode 100644 index 000000000..198ec3a82 --- /dev/null +++ b/src/database/models/Reviewers.ts @@ -0,0 +1,16 @@ +import { Column, ForeignKey, Model, Table } from "sequelize-typescript" + +import { User } from "@database/models/User" + +import { ReviewRequest } from "./ReviewRequest" + +@Table({ tableName: "reviewers" }) +export class Reviewer extends Model { + @ForeignKey(() => User) + @Column + reviewerId!: number + + @ForeignKey(() => ReviewRequest) + @Column + requestId!: string +} diff --git a/src/database/models/Site.ts b/src/database/models/Site.ts index 2b6240622..a75d39ed3 100644 --- a/src/database/models/Site.ts +++ b/src/database/models/Site.ts @@ -33,6 +33,7 @@ export class Site extends Model { @Column({ allowNull: false, type: DataType.TEXT, + unique: true, }) name!: string @@ -69,8 +70,9 @@ export class Site extends Model { onUpdate: "CASCADE", onDelete: "CASCADE", through: () => SiteMember, + as: "site_members", }) - users!: User[] + site_members!: Array @HasOne(() => Repo) repo?: Repo @@ -81,7 +83,9 @@ export class Site extends Model { @ForeignKey(() => User) creatorId!: number - @BelongsTo(() => User) + @BelongsTo(() => User, { + as: "site_creator", + }) creator!: User @HasOne(() => Launch) diff --git a/src/database/models/SiteMember.ts b/src/database/models/SiteMember.ts index cf8a37f28..0f54bee02 100644 --- a/src/database/models/SiteMember.ts +++ b/src/database/models/SiteMember.ts @@ -1,18 +1,31 @@ import { + BelongsTo, Column, CreatedAt, DataType, ForeignKey, + HasMany, Model, Table, UpdatedAt, } 
from "sequelize-typescript" +import { CollaboratorRoles } from "@constants/index" + +import { Notification } from "@database/models/Notification" import { Site } from "@database/models/Site" import { User } from "@database/models/User" @Table({ tableName: "site_members" }) export class SiteMember extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + @ForeignKey(() => User) @Column userId!: number @@ -23,13 +36,22 @@ export class SiteMember extends Model { @Column({ allowNull: false, - type: DataType.ENUM("ADMIN", "USER"), + type: DataType.ENUM("ADMIN", "CONTRIBUTOR"), }) - role!: boolean + role!: CollaboratorRoles @CreatedAt createdAt!: Date @UpdatedAt updatedAt!: Date + + @BelongsTo(() => Site) + site!: Site + + @BelongsTo(() => User) + user!: User + + @HasMany(() => Notification) + notifications?: Notification[] } diff --git a/src/database/models/User.ts b/src/database/models/User.ts index b73e0bac0..9395b37df 100644 --- a/src/database/models/User.ts +++ b/src/database/models/User.ts @@ -14,7 +14,10 @@ import { Launch } from "@database/models/Launch" import { Site } from "@database/models/Site" import { SiteMember } from "@database/models/SiteMember" +import { ReviewRequest } from "./ReviewRequest" + @Table({ tableName: "users", paranoid: true }) +// eslint-disable-next-line import/prefer-default-export export class User extends Model { @Column({ autoIncrement: true, @@ -32,7 +35,7 @@ export class User extends Model { email?: string | null @Column({ - allowNull: false, + allowNull: true, unique: true, type: DataType.TEXT, validate: { @@ -62,16 +65,22 @@ export class User extends Model { @DeletedAt deletedAt?: Date - @BelongsToMany(() => User, { + @BelongsToMany(() => Site, { onUpdate: "CASCADE", onDelete: "CASCADE", through: () => SiteMember, + as: "site_members", }) - sites!: Site[] + sites!: Array - @HasMany(() => Site) + @HasMany(() => Site, { + as: "sites_created", + }) sitesCreated?: Site[] @HasMany(() => Launch) launches?: Launch[] + + @HasMany(() => ReviewRequest) + reviewRequests?: ReviewRequest[] } diff --git a/src/database/models/index.ts b/src/database/models/index.ts index 698556b16..c6df83264 100644 --- a/src/database/models/index.ts +++ b/src/database/models/index.ts @@ -3,7 +3,14 @@ export * from "@database/models/SiteMember" export * from "@database/models/User" export * from "@database/models/Whitelist" export * from "@database/models/AccessToken" +export * from "@database/models/Otp" export * from "@database/models/Repo" export * from "@database/models/Deployment" export * from "@database/models/Launch" export * from "@database/models/Redirection" +export * from "@database/models/IsomerAdmin" +export * from "@database/models/ReviewMeta" +export * from "@database/models/ReviewRequest" +export * from "@database/models/Reviewers" +export * from "@database/models/Notification" +export * from "@database/models/ReviewRequestView" diff --git a/src/errors/ForbiddenError.js b/src/errors/ForbiddenError.js index 8c0b066cd..3e5919184 100644 --- a/src/errors/ForbiddenError.js +++ b/src/errors/ForbiddenError.js @@ -2,8 +2,8 @@ const { BaseIsomerError } = require("@errors/BaseError") class ForbiddenError extends BaseIsomerError { - constructor() { - super(403, "Access forbidden") + constructor(message) { + super(403, message || "Access forbidden") } } diff --git a/src/errors/RequestNotFoundError.ts b/src/errors/RequestNotFoundError.ts new file mode 100644 index 000000000..6dcd7513b --- /dev/null +++ 
b/src/errors/RequestNotFoundError.ts @@ -0,0 +1,10 @@ +import { NotFoundError } from "./NotFoundError" + +export default class RequestNotFoundError extends NotFoundError { + constructor(message = "The specified review request could not be found!") { + super() + Error.captureStackTrace(this, this.constructor) + this.name = this.constructor.name + this.message = message + } +} diff --git a/src/fixtures/app.js b/src/fixtures/app.js deleted file mode 100644 index 1a40490b3..000000000 --- a/src/fixtures/app.js +++ /dev/null @@ -1,18 +0,0 @@ -const cookieParser = require("cookie-parser") -const express = require("express") - -const { errorHandler } = require("@middleware/errorHandler") - -function generateRouter(router) { - const app = express() - app.use(express.json({ limit: "7mb" })) - app.use(express.urlencoded({ extended: false })) - app.use(cookieParser()) - app.use(router) - app.use(errorHandler) - return app -} - -module.exports = { - generateRouter, -} diff --git a/src/fixtures/app.ts b/src/fixtures/app.ts new file mode 100644 index 000000000..96bd6fec3 --- /dev/null +++ b/src/fixtures/app.ts @@ -0,0 +1,156 @@ +import cookieParser from "cookie-parser" +import express, { Express } from "express" +import _ from "lodash" + +import { errorHandler } from "@middleware/errorHandler" + +import GithubSessionData from "@classes/GithubSessionData" +import UserSessionData from "@classes/UserSessionData" +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { RequestHandler } from "@root/types" + +import { + mockUserSessionData, + mockUserWithSiteSessionData, + mockGithubSessionData, + MOCK_USER_SESSION_DATA_ONE, +} from "./sessionData" +import { MOCK_REPO_NAME_ONE } from "./sites" + +/** + * @deprecated + */ +const attachSessionData: RequestHandler< + unknown, + unknown, + unknown, + unknown, + { + userSessionData: UserSessionData + userWithSiteSessionData: UserWithSiteSessionData + githubSessionData: GithubSessionData + } +> = (req, res, next) => { + res.locals.userSessionData = mockUserSessionData + res.locals.userWithSiteSessionData = mockUserWithSiteSessionData + res.locals.githubSessionData = mockGithubSessionData + next() +} + +const attachUserSessionData: ( + userSessionData: UserSessionData +) => RequestHandler< + unknown, + unknown, + unknown, + unknown, + { userSessionData: UserSessionData } +> = (userSessionData) => (req, res, next) => { + res.locals.userSessionData = userSessionData + next() +} + +const attachUserSessionDataWithSite: ( + userSessionData: UserSessionData, + siteName: string +) => RequestHandler< + unknown, + unknown, + unknown, + unknown, + { + userSessionData: UserSessionData + userWithSiteSessionData: UserWithSiteSessionData + } +> = (userSessionData, siteName) => (req, res, next) => { + const userWithSiteSessionData = new UserWithSiteSessionData({ + isomerUserId: userSessionData.isomerUserId, + email: userSessionData.email, + siteName, + }) + res.locals.userSessionData = userSessionData + res.locals.userWithSiteSessionData = userWithSiteSessionData + next() +} + +const attachDefaultUserSessionData: RequestHandler< + unknown, + unknown, + unknown, + unknown, + { userSessionData: UserSessionData } +> = attachUserSessionData(MOCK_USER_SESSION_DATA_ONE) + +const attachDefaultUserSessionDataWithSite: RequestHandler< + unknown, + unknown, + unknown, + unknown, + { + userSessionData: UserSessionData + userWithSiteSessionData: UserWithSiteSessionData + } +> = attachUserSessionDataWithSite( + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE +) + 
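+// A minimal usage sketch (illustrative only, not part of this patch): a spec
+// can mount the router under test behind a fixed session via the helpers
+// defined below. `someRouterUnderTest` is a hypothetical placeholder.
+//
+//   const subrouter = express()
+//   subrouter.use("/:siteName", someRouterUnderTest)
+//   const app = generateRouterForUserWithSite(
+//     subrouter,
+//     MOCK_USER_SESSION_DATA_ONE,
+//     MOCK_REPO_NAME_ONE
+//   )
+//   // Requests made with supertest against `app` now carry the mock session,
+//   // e.g. await request(app).get(`/${MOCK_REPO_NAME_ONE}/somePath`)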
+/** + * @deprecated + */ +export const generateRouter = (router: Express) => { + const app = express() + app.use(express.json({ limit: "7mb" })) + app.use(express.urlencoded({ extended: false })) + app.use(cookieParser()) + app.use(attachSessionData) + app.use(router) + app.use(errorHandler) + return app +} + +const generateFinalRouter = (router: Express) => { + const app = express() + app.use(express.json({ limit: "7mb" })) + app.use(express.urlencoded({ extended: false })) + app.use(cookieParser()) + app.use(router) + app.use(errorHandler) + return app +} + +export const generateRouterForUser = ( + router: Express, + userSessionData: UserSessionData +) => { + const app = express() + app.use(attachUserSessionData(userSessionData)) + app.use(router) + return generateFinalRouter(app) +} + +export const generateRouterForUserWithSite = ( + router: Express, + userSessionData: UserSessionData, + siteName: string +) => { + const app = express() + app.use(attachUserSessionDataWithSite(userSessionData, siteName)) + app.use(router) + return generateFinalRouter(app) +} + +export const generateRouterForDefaultUser = (router: Express) => { + const app = express() + app.use(attachDefaultUserSessionData) + app.use(router) + return generateFinalRouter(app) +} + +export const generateRouterForDefaultUserWithSite = (router: Express) => { + const app = express() + app.use(attachDefaultUserSessionDataWithSite) + app.use(router) + return generateFinalRouter(app) +} diff --git a/src/fixtures/github.ts b/src/fixtures/github.ts new file mode 100644 index 000000000..0adefcfb6 --- /dev/null +++ b/src/fixtures/github.ts @@ -0,0 +1,139 @@ +import { Commit, RawComment, RawFileChangeInfo } from "@root/types/github" + +import { MOCK_USER_ID_ONE, MOCK_USER_ID_TWO } from "./users" + +export const MOCK_GITHUB_USER_NAME_ONE = "isomergithub1" +export const MOCK_GITHUB_USER_NAME_TWO = "isomergithub2" + +export const MOCK_GITHUB_USER_EMAIL_ONE = + "111718653+isomergithub1@users.noreply.github.com" +export const MOCK_GITHUB_USER_EMAIL_TWO = + "111725612+isomergithub2@users.noreply.github.com" + +export const MOCK_GITHUB_PULL_REQUEST_NUMBER = 251 + +// This is one set of commits and file changes which should be used together +export const MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA = + "a15a7c8b23324f680cd7c5011ca763e36d350f41" +export const MOCK_GITHUB_COMMIT_DATE_ONE = "2022-10-12T06:31:05Z" +export const MOCK_GITHUB_COMMIT_DATE_TWO = "2022-10-13T05:39:43Z" +export const MOCK_GITHUB_COMMIT_DATE_THREE = "2022-11-07T16:32:08Z" +export const MOCK_GITHUB_FILENAME_ALPHA_ONE = "index.md" +export const MOCK_GITHUB_FILEPATH_ALPHA_ONE = "" +export const MOCK_GITHUB_FULL_FILEPATH_ALPHA_ONE = encodeURIComponent( + MOCK_GITHUB_FILENAME_ALPHA_ONE +) +export const MOCK_GITHUB_FILENAME_ALPHA_TWO = "Example Title 22.md" +export const MOCK_GITHUB_FILEPATH_ALPHA_TWO = "pages/" +export const MOCK_GITHUB_FULL_FILEPATH_ALPHA_TWO = encodeURIComponent( + MOCK_GITHUB_FILEPATH_ALPHA_TWO + MOCK_GITHUB_FILENAME_ALPHA_TWO +) +export const MOCK_GITHUB_COMMIT_MESSAGE_ALPHA_ONE = `Update file: ${MOCK_GITHUB_FILENAME_ALPHA_ONE}` +export const MOCK_GITHUB_COMMIT_MESSAGE_ALPHA_TWO = `Update file: ${MOCK_GITHUB_FILENAME_ALPHA_TWO}` +export const MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_ONE = { + message: MOCK_GITHUB_COMMIT_MESSAGE_ALPHA_ONE, + fileName: MOCK_GITHUB_FILENAME_ALPHA_ONE, + userId: MOCK_USER_ID_ONE, +} +export const MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_TWO = { + message: MOCK_GITHUB_COMMIT_MESSAGE_ALPHA_TWO, + fileName: MOCK_GITHUB_FILENAME_ALPHA_TWO, 
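+  // Note: these message objects are JSON.stringify-ed into the mock GitHub
+  // commit messages below; userId identifies the Isomer user behind the change.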
+ userId: MOCK_USER_ID_ONE, +} +export const MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_THREE = { + message: MOCK_GITHUB_COMMIT_MESSAGE_ALPHA_TWO, + fileName: MOCK_GITHUB_FILENAME_ALPHA_TWO, + userId: MOCK_USER_ID_TWO, +} + +export const MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_ONE: RawFileChangeInfo = { + sha: "66804d21ba86f1a193c31714bc15e388c2013a57", + filename: MOCK_GITHUB_FILENAME_ALPHA_ONE, + status: "modified", + additions: 1, + deletions: 2, + changes: 3, + blob_url: `https://github.com/isomerpages/a-test-v4/blob/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_ONE}`, + raw_url: `https://github.com/isomerpages/a-test-v4/raw/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_ONE}`, + contents_url: `https://api.github.com/repos/isomerpages/a-test-v4/contents/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_ONE}?ref=${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}`, +} +export const MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_TWO: RawFileChangeInfo = { + sha: "f04f18eaa8d31fffc9f8cf5020b1f6a765ac225f", + filename: `${MOCK_GITHUB_FILEPATH_ALPHA_TWO}${MOCK_GITHUB_FILENAME_ALPHA_TWO}`, + status: "modified", + additions: 13, + deletions: 2, + changes: 15, + blob_url: `https://github.com/isomerpages/a-test-v4/blob/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_TWO}`, + raw_url: `https://github.com/isomerpages/a-test-v4/raw/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_TWO}`, + contents_url: `https://api.github.com/repos/isomerpages/a-test-v4/contents/${MOCK_GITHUB_FULL_FILEPATH_ALPHA_TWO}?ref=${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}`, +} + +export const MOCK_GITHUB_COMMIT_ALPHA_ONE: Commit = { + url: + "https://api.github.com/repos/isomerpages/a-test-v4/commits/a79525f0d188880b965053bc0df25a041b476fad", + sha: "a79525f0d188880b965053bc0df25a041b476fad", + commit: { + url: + "https://api.github.com/repos/isomerpages/a-test-v4/git/commits/a79525f0d188880b965053bc0df25a041b476fad", + author: { + name: MOCK_GITHUB_USER_NAME_ONE, + email: MOCK_GITHUB_USER_EMAIL_ONE, + date: MOCK_GITHUB_COMMIT_DATE_ONE, + }, + message: JSON.stringify(MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_ONE), + }, +} +export const MOCK_GITHUB_COMMIT_ALPHA_TWO: Commit = { + url: + "https://api.github.com/repos/isomerpages/a-test-v4/commits/ad2b13184f8ee1030636c304737941146bd67f4d", + sha: "ad2b13184f8ee1030636c304737941146bd67f4d", + commit: { + url: + "https://api.github.com/repos/isomerpages/a-test-v4/git/commits/ad2b13184f8ee1030636c304737941146bd67f4d", + author: { + name: MOCK_GITHUB_USER_NAME_TWO, + email: MOCK_GITHUB_USER_EMAIL_TWO, + date: MOCK_GITHUB_COMMIT_DATE_TWO, + }, + message: JSON.stringify(MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_TWO), + }, +} +export const MOCK_GITHUB_COMMIT_ALPHA_THREE: Commit = { + url: `https://api.github.com/repos/isomerpages/a-test-v4/commits/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}`, + sha: MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA, + commit: { + url: `https://api.github.com/repos/isomerpages/a-test-v4/git/commits/${MOCK_GITHUB_COMMIT_SHA_LATEST_ALPHA}`, + author: { + name: MOCK_GITHUB_USER_NAME_ONE, + email: MOCK_GITHUB_USER_EMAIL_ONE, + date: MOCK_GITHUB_COMMIT_DATE_THREE, + }, + message: JSON.stringify(MOCK_GITHUB_COMMIT_MESSAGE_OBJECT_ALPHA_THREE), + }, +} +// end of set + +export const MOCK_GITHUB_COMMENT_BODY_ONE = "Comment 1" +export const MOCK_GITHUB_COMMENT_BODY_TWO = "Comment 2" + +export const MOCK_GITHUB_COMMENT_OBJECT_ONE = { + message: MOCK_GITHUB_COMMENT_BODY_ONE, + fileName: MOCK_GITHUB_FILENAME_ALPHA_ONE, + userId: 
MOCK_USER_ID_ONE, +} + +export const MOCK_GITHUB_COMMENT_OBJECT_TWO = { + message: MOCK_GITHUB_COMMENT_BODY_TWO, + fileName: MOCK_GITHUB_FILENAME_ALPHA_TWO, + userId: MOCK_USER_ID_TWO, +} + +export const MOCK_GITHUB_RAWCOMMENT_ONE: RawComment = { + body: JSON.stringify(MOCK_GITHUB_COMMENT_OBJECT_ONE), + created_at: MOCK_GITHUB_COMMIT_DATE_ONE, +} +export const MOCK_GITHUB_RAWCOMMENT_TWO: RawComment = { + body: JSON.stringify(MOCK_GITHUB_COMMENT_OBJECT_TWO), + created_at: MOCK_GITHUB_COMMIT_DATE_THREE, +} diff --git a/src/fixtures/identity.ts b/src/fixtures/identity.ts index a4953d4a4..6e199281e 100644 --- a/src/fixtures/identity.ts +++ b/src/fixtures/identity.ts @@ -1,3 +1,12 @@ +import { Attributes } from "sequelize/types" + +import { config } from "@config/config" + +import { User, SiteMember } from "@database/models" +import { Author } from "@root/types/github" + +import { mockIsomerUserId } from "./sessionData" + export const mockRecipient = "hello@world.com" export const mockSubject = "mock subject" export const mockBody = "somebody" @@ -12,9 +21,179 @@ export const mockHeaders = { } export const mockSiteName = "hello world" export const mockUserId = "some user id" +export const mockSiteId = "16" export const mockBearerTokenHeaders = { headers: { - Authorization: `Bearer ${process.env.POSTMAN_API_KEY}`, + Authorization: `Bearer ${config.get("postman.apiKey")}`, + }, +} + +export const MOCK_IDENTITY_EMAIL_ONE = "test1@test.gov.sg" +export const MOCK_IDENTITY_EMAIL_TWO = "test2@test.gov.sg" +export const MOCK_IDENTITY_EMAIL_THREE = "test3@test.gov.sg" +export const MOCK_IDENTITY_EMAIL_FOUR = "test4@test.gov.sg" + +export const mockCollaboratorContributor1: Attributes & { + SiteMember: Attributes +} = { + id: 1, + email: MOCK_IDENTITY_EMAIL_ONE, + githubId: "test1", + lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), + createdAt: new Date("2022-04-04T07:25:41.013Z"), + updatedAt: new Date("2022-07-30T07:41:09.662Z"), + deletedAt: undefined, + SiteMember: { + userId: 1, + siteId: mockSiteId, + role: "CONTRIBUTOR", + createdAt: new Date("2022-07-29T03:50:49.145Z"), + updatedAt: new Date("2022-07-29T03:50:49.145Z"), + }, + sites: [], +} + +export const mockCollaboratorAdmin1: Attributes & { + SiteMember: Attributes +} = { + id: 2, + email: MOCK_IDENTITY_EMAIL_TWO, + githubId: "test2", + lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), + createdAt: new Date("2022-04-04T07:25:41.013Z"), + updatedAt: new Date("2022-07-30T07:41:09.662Z"), + deletedAt: undefined, + SiteMember: { + userId: 2, + siteId: mockSiteId, + role: "ADMIN", + createdAt: new Date("2022-07-29T03:50:49.145Z"), + updatedAt: new Date("2022-07-29T03:50:49.145Z"), }, + sites: [], } +export const mockCollaboratorAdmin2: Attributes & { + SiteMember: Attributes +} = { + id: 3, + email: MOCK_IDENTITY_EMAIL_THREE, + githubId: "test3", + lastLoggedIn: new Date("2022-06-30T07:41:09.661Z"), + createdAt: new Date("2022-04-04T07:25:41.013Z"), + updatedAt: new Date("2022-07-30T07:41:09.662Z"), + deletedAt: undefined, + SiteMember: { + userId: 3, + siteId: mockSiteId, + role: "ADMIN", + createdAt: new Date("2022-07-29T03:50:49.145Z"), + updatedAt: new Date("2022-07-29T03:50:49.145Z"), + }, + sites: [], +} +export const mockCollaboratorContributor2: Attributes & { + SiteMember: Attributes +} = { + id: 4, + email: MOCK_IDENTITY_EMAIL_FOUR, + githubId: "test4", + lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), + createdAt: new Date("2022-04-04T07:25:41.013Z"), + updatedAt: new Date("2022-07-30T07:41:09.662Z"), + deletedAt: undefined, + 
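+  // The nested SiteMember property mirrors the through-table attributes that
+  // Sequelize attaches when site members are eager-loaded; the collaborator's
+  // role lives on this join row rather than on the User itself.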
SiteMember: { + userId: 4, + siteId: mockSiteId, + role: "CONTRIBUTOR", + createdAt: new Date("2022-07-29T03:50:49.145Z"), + updatedAt: new Date("2022-07-29T03:50:49.145Z"), + }, + sites: [], +} + +export const unsortedMockCollaboratorsList = [ + mockCollaboratorContributor1, + mockCollaboratorAdmin1, + mockCollaboratorAdmin2, + mockCollaboratorContributor2, +] + +export const expectedSortedMockCollaboratorsList = [ + mockCollaboratorAdmin2, + mockCollaboratorAdmin1, + mockCollaboratorContributor1, + mockCollaboratorContributor2, +] + +export const mockSiteOrmResponseWithAllCollaborators = { + id: 1, + name: "", + site_members: unsortedMockCollaboratorsList, +} +export const mockSiteOrmResponseWithOneAdminCollaborator = { + id: 1, + name: "", + site_members: [mockCollaboratorAdmin1], +} +export const mockSiteOrmResponseWithOneContributorCollaborator = { + id: 1, + name: "", + site_members: [mockCollaboratorContributor2], +} +export const mockSiteOrmResponseWithNoCollaborators = { + id: 1, + site_members: "", +} + +export const MOCK_COMMIT_MESSAGE_ONE = "Update file: Example.md" +export const MOCK_COMMIT_FILENAME_ONE = "Example.md" +export const MOCK_COMMIT_FILEPATH_ONE = "test/path/one/" +export const MOCK_GITHUB_NAME_ONE = "testuser" +export const MOCK_GITHUB_EMAIL_ADDRESS_ONE = "test@example.com" +export const MOCK_GITHUB_DATE_ONE = "2022-09-22T04:07:53Z" +export const MOCK_COMMIT_MESSAGE_OBJECT_ONE = { + message: MOCK_COMMIT_MESSAGE_ONE, + fileName: MOCK_COMMIT_FILENAME_ONE, + userId: mockIsomerUserId, +} + +export const MOCK_COMMIT_MESSAGE_TWO = "Update file: Test.md" +export const MOCK_COMMIT_FILENAME_TWO = "Test.md" +export const MOCK_COMMIT_FILEPATH_TWO = "test/path/two/" +export const MOCK_GITHUB_NAME_TWO = "testuser2" +export const MOCK_GITHUB_EMAIL_ADDRESS_TWO = "test2@example.com" +export const MOCK_GITHUB_DATE_TWO = "2022-09-28T06:25:14Z" +export const MOCK_COMMIT_MESSAGE_OBJECT_TWO = { + message: MOCK_COMMIT_MESSAGE_TWO, + fileName: MOCK_COMMIT_FILENAME_TWO, + userId: mockIsomerUserId, +} + +export const MOCK_GITHUB_COMMIT_AUTHOR_ONE: Author = { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, +} + +export const MOCK_GITHUB_COMMIT_AUTHOR_TWO: Author = { + name: MOCK_GITHUB_NAME_TWO, + email: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + date: MOCK_GITHUB_DATE_TWO, +} + +export const MOCK_GITHUB_COMMENT_ONE = "test comment 1" +export const MOCK_GITHUB_COMMENT_DATA_ONE = { + userId: mockIsomerUserId, + message: MOCK_GITHUB_COMMENT_ONE, + createdAt: MOCK_GITHUB_DATE_ONE, +} + +export const MOCK_GITHUB_COMMENT_TWO = "test comment 2" +export const MOCK_GITHUB_COMMENT_DATA_TWO = { + userId: mockIsomerUserId, + message: MOCK_GITHUB_COMMENT_TWO, + createdAt: MOCK_GITHUB_DATE_TWO, +} + +export const MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME = "isomergithub1" diff --git a/src/fixtures/markdown-fixtures.ts b/src/fixtures/markdown-fixtures.ts new file mode 100644 index 000000000..ca793a7b3 --- /dev/null +++ b/src/fixtures/markdown-fixtures.ts @@ -0,0 +1,48 @@ +const normalFrontMatter = `layout: simple-page +title: Digital Transformation +permalink: /digital-transformation/ +breadcrumb: Digital Transformation` + +const maliciousFrontMatter = `layout: simple-page +title: Digital Transformation +permalink: /digital-transformation/ +breadcrumb: Digital Transformation` + +const normalPageContent = `### Test header +### **Subheader** +Content +![Image](/path/to-image.jpg)` + +const maliciousPageContent = `### Test header +### **Subheader** +Content 
+![Image](/path/to-image.jpg)` + +export const normalMarkdownContent = `--- +${normalFrontMatter} +--- +${normalPageContent}` + +export const maliciousMarkdownContent = `--- +${maliciousFrontMatter} +--- +${maliciousPageContent}` + +export const normalJsonObject = { + frontMatter: { + layout: "simple-page", + title: "Digital Transformation", + permalink: "/digital-transformation/", + breadcrumb: "Digital Transformation", + }, + pageContent: normalPageContent, +} +export const maliciousJsonObject = { + frontMatter: { + layout: "simple-page", + title: "Digital Transformation", + permalink: "/digital-transformation/", + breadcrumb: "Digital Transformation", + }, + pageContent: maliciousPageContent, +} diff --git a/src/fixtures/notifications.ts b/src/fixtures/notifications.ts new file mode 100644 index 000000000..637c8df8b --- /dev/null +++ b/src/fixtures/notifications.ts @@ -0,0 +1,52 @@ +const CREATED_TIME = new Date() +const READ_TIME = new Date() +const OLD_READ_TIME = new Date("1995-12-17T03:24:00") + +export const normalPriorityUnreadNotification = { + message: "low priority unread notification", + link: "google.com", + sourceUsername: "user", + type: "sent_request", + firstReadTime: null, + priority: 2, + createdAt: CREATED_TIME, +} + +export const normalPriorityReadNotification = { + ...normalPriorityUnreadNotification, + message: "low priority read notification", + firstReadTime: READ_TIME, +} + +export const highPriorityUnreadNotification = { + ...normalPriorityUnreadNotification, + message: "high priority unread notification", + priority: 1, +} + +export const highPriorityReadNotification = { + ...normalPriorityReadNotification, + message: "high priority read notification", + priority: 1, +} + +export const normalPriorityOldReadNotification = { + ...normalPriorityReadNotification, + message: "low priority old notification", + firstReadTime: OLD_READ_TIME, +} + +export const highPriorityOldReadNotification = { + ...highPriorityReadNotification, + message: "high priority old notification", + firstReadTime: OLD_READ_TIME, +} + +export const formatNotification = (notification: any) => ({ + message: notification.message, + createdAt: CREATED_TIME.toISOString(), + link: notification.link, + isRead: !!notification.firstReadTime, + sourceUsername: notification.sourceUsername, + type: notification.type, +}) diff --git a/src/fixtures/repoInfo.js b/src/fixtures/repoInfo.ts similarity index 57% rename from src/fixtures/repoInfo.js rename to src/fixtures/repoInfo.ts index 25e6dbcd8..49b7474dc 100644 --- a/src/fixtures/repoInfo.js +++ b/src/fixtures/repoInfo.ts @@ -1,8 +1,19 @@ -const repoInfo = { +import { GitHubRepositoryData } from "@root/types/repoInfo" + +export const MOCK_STAGING_URL_GITHUB = "https://repo-staging.netlify.app" +export const MOCK_STAGING_URL_CONFIGYML = + "https://repo-staging-configyml.netlify.app" +export const MOCK_STAGING_URL_DB = "https://repo-staging-db.netlify.app" + +export const MOCK_PRODUCTION_URL_GITHUB = "https://repo-prod.netlify.app" +export const MOCK_PRODUCTION_URL_CONFIGYML = + "https://repo-prod-configyml.netlify.app" +export const MOCK_PRODUCTION_URL_DB = "https://repo-prod-db.netlify.app" + +export const repoInfo: GitHubRepositoryData = { name: "repo", private: false, - description: - "Staging: https://repo-staging.netlify.app | Production: https://repo-prod.netlify.app", + description: `Staging: ${MOCK_STAGING_URL_GITHUB} | Production: ${MOCK_PRODUCTION_URL_GITHUB}`, pushed_at: "2021-09-09T02:41:37Z", permissions: { admin: true, @@ -13,7 +24,7 @@ const 
repoInfo = { }, } -const repoInfo2 = { +export const repoInfo2: GitHubRepositoryData = { name: "repo2", private: false, description: @@ -28,7 +39,7 @@ const repoInfo2 = { }, } -const adminRepo = { +export const adminRepo: GitHubRepositoryData = { name: "isomercms-backend", private: false, description: @@ -43,7 +54,7 @@ const adminRepo = { }, } -const noAccessRepo = { +export const noAccessRepo: GitHubRepositoryData = { name: "noaccess", private: false, description: @@ -57,10 +68,3 @@ const noAccessRepo = { pull: true, }, } - -module.exports = { - repoInfo, - repoInfo2, - adminRepo, - noAccessRepo, -} diff --git a/src/fixtures/review.ts b/src/fixtures/review.ts new file mode 100644 index 000000000..dbedadd48 --- /dev/null +++ b/src/fixtures/review.ts @@ -0,0 +1,89 @@ +import { Attributes } from "sequelize/types" + +import { ReviewRequestStatus } from "@root/constants" +import { ReviewRequest, ReviewRequestView } from "@root/database/models" +import { Commit, RawPullRequest } from "@root/types/github" + +import { + mockCollaboratorAdmin1, + mockCollaboratorAdmin2, + mockCollaboratorContributor1, + MOCK_COMMIT_FILEPATH_TWO, + MOCK_COMMIT_MESSAGE_OBJECT_ONE, + MOCK_COMMIT_MESSAGE_OBJECT_TWO, + MOCK_GITHUB_COMMIT_AUTHOR_ONE, + MOCK_GITHUB_COMMIT_AUTHOR_TWO, + MOCK_GITHUB_DATE_ONE, +} from "./identity" + +export const MOCK_PULL_REQUEST_FILE_FILENAME_ONE = "file1.txt" +export const MOCK_PULL_REQUEST_FILE_CONTENTSURL_ONE = + "https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e" +export const MOCK_PULL_REQUEST_FILE_FILENAME_TWO = "file2.txt" +export const MOCK_PULL_REQUEST_FILE_CONTENTSURL_TWO = + "https://api.github.com/repos/octocat/Hello-World/contents/file2.txt?ref=bbcd538c8e72b8c175046e27cc8f907076331401" + +export const MOCK_PULL_REQUEST_FILECHANGEINFO_ONE = { + filename: MOCK_PULL_REQUEST_FILE_FILENAME_ONE, + contents_url: MOCK_PULL_REQUEST_FILE_CONTENTSURL_ONE, +} +export const MOCK_PULL_REQUEST_FILECHANGEINFO_TWO = { + filename: `${MOCK_COMMIT_FILEPATH_TWO}/${MOCK_PULL_REQUEST_FILE_FILENAME_TWO}`, + contents_url: MOCK_PULL_REQUEST_FILE_CONTENTSURL_TWO, +} + +export const MOCK_PULL_REQUEST_COMMIT_SHA_ONE = + "6dcb09b5b57875f334f61aebed695e2e4193db5e" +export const MOCK_PULL_REQUEST_COMMIT_SHA_TWO = + "bbcd538c8e72b8c175046e27cc8f907076331401" + +export const MOCK_PULL_REQUEST_COMMIT_ONE: Commit = { + sha: MOCK_PULL_REQUEST_COMMIT_SHA_ONE, + url: "fakeUrl", + commit: { + url: "fakeUrl", + author: MOCK_GITHUB_COMMIT_AUTHOR_ONE, + message: JSON.stringify(MOCK_COMMIT_MESSAGE_OBJECT_ONE), + }, +} +export const MOCK_PULL_REQUEST_COMMIT_TWO: Commit = { + sha: MOCK_PULL_REQUEST_COMMIT_SHA_TWO, + url: "fakeUrl", + commit: { + url: "fakeUrl", + author: MOCK_GITHUB_COMMIT_AUTHOR_TWO, + message: JSON.stringify(MOCK_COMMIT_MESSAGE_OBJECT_TWO), + }, +} + +export const MOCK_PULL_REQUEST_TITLE_ONE = "Pull Request 1" +export const MOCK_PULL_REQUEST_BODY_ONE = "Pull Request 1 Description" +export const MOCK_PULL_REQUEST_CHANGED_FILES_ONE = 3 + +export const MOCK_PULL_REQUEST_ONE: RawPullRequest = { + title: MOCK_PULL_REQUEST_TITLE_ONE, + body: MOCK_PULL_REQUEST_BODY_ONE, + changed_files: MOCK_PULL_REQUEST_CHANGED_FILES_ONE, + created_at: MOCK_GITHUB_DATE_ONE, +} + +export const MOCK_REVIEW_REQUEST_ONE: Attributes = { + id: 1, + site: { + name: "Test Site 1", + }, + requestor: mockCollaboratorContributor1, + reviewMeta: { + id: 1, + pullRequestNumber: 1, + reviewLink: "fakeUrl", + }, + reviewStatus: ReviewRequestStatus.Open, + reviewers: 
[mockCollaboratorAdmin1, mockCollaboratorAdmin2], +} + +export const MOCK_REVIEW_REQUEST_VIEW_ONE: Attributes = { + id: 1, + reviewRequestId: 1, + lastViewedAt: new Date("2022-09-23T00:00:00Z"), +} diff --git a/src/fixtures/sessionData.ts b/src/fixtures/sessionData.ts new file mode 100644 index 000000000..3898ce3b6 --- /dev/null +++ b/src/fixtures/sessionData.ts @@ -0,0 +1,73 @@ +import GithubSessionData from "@root/classes/GithubSessionData" +import UserSessionData from "@root/classes/UserSessionData" +import UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" + +import { + MOCK_USER_EMAIL_ONE, + MOCK_USER_EMAIL_TWO, + MOCK_USER_EMAIL_THREE, + MOCK_USER_EMAIL_FOUR, + MOCK_USER_ID_ONE, + MOCK_USER_ID_TWO, + MOCK_USER_ID_THREE, + MOCK_USER_ID_FOUR, +} from "./users" + +export const mockAccessToken = "mockAccessToken" +export const mockGithubId = "mockGithubId" +export const mockIsomerUserId = "1" +export const mockEmail = "mockEmail" +export const mockTreeSha = "mockTreeSha" +export const mockCurrentCommitSha = "mockCurrentCommitSha" +export const mockSiteName = "mockSiteName" + +export const mockGithubState = { + treeSha: mockTreeSha, + currentCommitSha: mockCurrentCommitSha, +} + +export const mockUserSessionData = new UserSessionData({ + githubId: mockGithubId, + accessToken: mockAccessToken, + isomerUserId: mockIsomerUserId, + email: mockEmail, +}) + +export const mockUserWithSiteSessionData = new UserWithSiteSessionData({ + githubId: mockGithubId, + accessToken: mockAccessToken, + isomerUserId: mockIsomerUserId, + email: mockEmail, + siteName: mockSiteName, +}) + +export const mockGithubSessionData = new GithubSessionData({ + treeSha: mockTreeSha, + currentCommitSha: mockCurrentCommitSha, +}) +export const mockSessionDataEmailUser = new UserSessionData({ + isomerUserId: mockIsomerUserId, + email: mockEmail, +}) +export const mockSessionDataEmailUserWithSite = new UserWithSiteSessionData({ + isomerUserId: mockIsomerUserId, + email: mockEmail, + siteName: mockSiteName, +}) + +export const MOCK_USER_SESSION_DATA_ONE = new UserSessionData({ + isomerUserId: String(MOCK_USER_ID_ONE), + email: MOCK_USER_EMAIL_ONE, +}) +export const MOCK_USER_SESSION_DATA_TWO = new UserSessionData({ + isomerUserId: String(MOCK_USER_ID_TWO), + email: MOCK_USER_EMAIL_TWO, +}) +export const MOCK_USER_SESSION_DATA_THREE = new UserSessionData({ + isomerUserId: String(MOCK_USER_ID_THREE), + email: MOCK_USER_EMAIL_THREE, +}) +export const MOCK_USER_SESSION_DATA_FOUR = new UserSessionData({ + isomerUserId: String(MOCK_USER_ID_FOUR), + email: MOCK_USER_EMAIL_FOUR, +}) diff --git a/src/fixtures/sites.ts b/src/fixtures/sites.ts new file mode 100644 index 000000000..0aa44f3af --- /dev/null +++ b/src/fixtures/sites.ts @@ -0,0 +1,132 @@ +import { Attributes } from "sequelize/types" + +import { Deployment, Repo, Site, SiteMember } from "@database/models" +import { CollaboratorRoles, JobStatus, SiteStatus } from "@root/constants" + +import { + MOCK_USER_ID_FOUR, + MOCK_USER_ID_ONE, + MOCK_USER_ID_THREE, + MOCK_USER_ID_TWO, +} from "./users" + +export const MOCK_SITE_ID_ONE = 1 +export const MOCK_SITE_ID_TWO = 2 + +export const MOCK_SITE_NAME_ONE = "Human readable site name one" +export const MOCK_SITE_NAME_TWO = "Human readable site name two" + +export const MOCK_SITE_DATE_ONE = new Date("2022-09-23T00:00:00Z") +export const MOCK_SITE_DATE_TWO = new Date("2022-09-25T00:00:00Z") + +export const MOCK_REPO_NAME_ONE = "repo-name-test-one" +export const MOCK_REPO_NAME_TWO = "repo-name-test-two" + +export const 
MOCK_REPO_URL_ONE = "https://github.com/example/repo-one" +export const MOCK_REPO_URL_TWO = "https://github.com/example/repo-two" + +export const MOCK_DEPLOYMENT_PROD_URL_ONE = + "https://master.gibberishone.amplifyapp.com" +export const MOCK_DEPLOYMENT_PROD_URL_TWO = + "https://master.gibberishtwo.amplifyapp.com" + +export const MOCK_DEPLOYMENT_STAGING_URL_ONE = + "https://staging.gibberishone.amplifyapp.com" +export const MOCK_DEPLOYMENT_STAGING_URL_TWO = + "https://staging.gibberishtwo.amplifyapp.com" + +export const MOCK_SITE_DBENTRY_ONE: Attributes = { + id: MOCK_SITE_ID_ONE, + name: MOCK_REPO_NAME_ONE, + apiTokenName: "unused", + siteStatus: SiteStatus.Launched, + jobStatus: JobStatus.Ready, + creatorId: MOCK_USER_ID_ONE, + createdAt: MOCK_SITE_DATE_ONE, + updatedAt: MOCK_SITE_DATE_ONE, +} + +export const MOCK_SITE_DBENTRY_TWO: Attributes = { + id: MOCK_SITE_ID_TWO, + name: MOCK_REPO_NAME_TWO, + apiTokenName: "unused", + siteStatus: SiteStatus.Launched, + jobStatus: JobStatus.Ready, + creatorId: MOCK_USER_ID_TWO, + createdAt: MOCK_SITE_DATE_TWO, + updatedAt: MOCK_SITE_DATE_TWO, +} + +export const MOCK_REPO_DBENTRY_ONE: Attributes = { + id: 1, + name: MOCK_REPO_NAME_ONE, + url: MOCK_REPO_URL_ONE, + siteId: MOCK_SITE_ID_ONE, + createdAt: MOCK_SITE_DATE_ONE, + updatedAt: MOCK_SITE_DATE_ONE, +} + +export const MOCK_REPO_DBENTRY_TWO: Attributes = { + id: 2, + name: MOCK_REPO_NAME_TWO, + url: MOCK_REPO_URL_TWO, + siteId: MOCK_SITE_ID_TWO, + createdAt: MOCK_SITE_DATE_TWO, + updatedAt: MOCK_SITE_DATE_TWO, +} + +export const MOCK_DEPLOYMENT_DBENTRY_ONE: Attributes = { + id: 1, + siteId: MOCK_SITE_ID_ONE, + productionUrl: MOCK_DEPLOYMENT_PROD_URL_ONE, + stagingUrl: MOCK_DEPLOYMENT_STAGING_URL_ONE, + createdAt: MOCK_SITE_DATE_ONE, + updatedAt: MOCK_SITE_DATE_ONE, + hostingId: "1", +} + +export const MOCK_DEPLOYMENT_DBENTRY_TWO: Attributes = { + id: 2, + siteId: MOCK_SITE_ID_TWO, + productionUrl: MOCK_DEPLOYMENT_PROD_URL_TWO, + stagingUrl: MOCK_DEPLOYMENT_STAGING_URL_TWO, + createdAt: MOCK_SITE_DATE_TWO, + updatedAt: MOCK_SITE_DATE_TWO, + hostingId: "1", +} + +export const MOCK_SITEMEMBER_DBENTRY_ONE: Attributes = { + id: 1, + userId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + role: CollaboratorRoles.Admin, + createdAt: MOCK_SITE_DATE_ONE, + updatedAt: MOCK_SITE_DATE_ONE, +} + +export const MOCK_SITEMEMBER_DBENTRY_TWO: Attributes = { + id: 2, + userId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + role: CollaboratorRoles.Contributor, + createdAt: MOCK_SITE_DATE_ONE, + updatedAt: MOCK_SITE_DATE_ONE, +} + +export const MOCK_SITEMEMBER_DBENTRY_THREE: Attributes = { + id: 3, + userId: MOCK_USER_ID_THREE, + siteId: MOCK_SITE_ID_TWO, + role: CollaboratorRoles.Admin, + createdAt: MOCK_SITE_DATE_TWO, + updatedAt: MOCK_SITE_DATE_TWO, +} + +export const MOCK_SITEMEMBER_DBENTRY_FOUR: Attributes = { + id: 4, + userId: MOCK_USER_ID_FOUR, + siteId: MOCK_SITE_ID_TWO, + role: CollaboratorRoles.Contributor, + createdAt: MOCK_SITE_DATE_TWO, + updatedAt: MOCK_SITE_DATE_TWO, +} diff --git a/src/fixtures/users.ts b/src/fixtures/users.ts new file mode 100644 index 000000000..079c3c966 --- /dev/null +++ b/src/fixtures/users.ts @@ -0,0 +1,55 @@ +import { Attributes } from "sequelize/types" + +import { User } from "@database/models" + +export const MOCK_USER_ID_ONE = 1 +export const MOCK_USER_ID_TWO = 2 +export const MOCK_USER_ID_THREE = 3 +export const MOCK_USER_ID_FOUR = 4 + +export const MOCK_USER_EMAIL_ONE = "one@test.gov.sg" +export const MOCK_USER_EMAIL_TWO = "two@test.gov.sg" +export const 
MOCK_USER_EMAIL_THREE = "three@test.gov.sg" +export const MOCK_USER_EMAIL_FOUR = "four@test.gov.sg" + +export const MOCK_USER_DATE_ONE = new Date("2022-08-23T00:00:00Z") +export const MOCK_USER_DATE_TWO = new Date("2022-08-25T00:00:00Z") +export const MOCK_USER_DATE_THREE = new Date("2022-08-27T00:00:00Z") +export const MOCK_USER_DATE_FOUR = new Date("2022-08-29T00:00:00Z") + +export const MOCK_USER_LAST_LOGIN_ONE = new Date("2022-09-12T00:00:00Z") +export const MOCK_USER_LAST_LOGIN_TWO = new Date("2022-09-14T00:00:00Z") +export const MOCK_USER_LAST_LOGIN_THREE = new Date("2022-09-16T00:00:00Z") +export const MOCK_USER_LAST_LOGIN_FOUR = new Date("2022-09-18T00:00:00Z") + +export const MOCK_USER_DBENTRY_ONE: Attributes = { + id: MOCK_USER_ID_ONE, + email: MOCK_USER_EMAIL_ONE, + lastLoggedIn: MOCK_USER_LAST_LOGIN_ONE, + createdAt: MOCK_USER_DATE_ONE, + updatedAt: MOCK_USER_DATE_ONE, +} + +export const MOCK_USER_DBENTRY_TWO: Attributes = { + id: MOCK_USER_ID_TWO, + email: MOCK_USER_EMAIL_TWO, + lastLoggedIn: MOCK_USER_LAST_LOGIN_TWO, + createdAt: MOCK_USER_DATE_TWO, + updatedAt: MOCK_USER_DATE_TWO, +} + +export const MOCK_USER_DBENTRY_THREE: Attributes = { + id: MOCK_USER_ID_THREE, + email: MOCK_USER_EMAIL_THREE, + lastLoggedIn: MOCK_USER_LAST_LOGIN_THREE, + createdAt: MOCK_USER_DATE_THREE, + updatedAt: MOCK_USER_DATE_THREE, +} + +export const MOCK_USER_DBENTRY_FOUR: Attributes = { + id: MOCK_USER_ID_FOUR, + email: MOCK_USER_EMAIL_FOUR, + lastLoggedIn: MOCK_USER_LAST_LOGIN_FOUR, + createdAt: MOCK_USER_DATE_FOUR, + updatedAt: MOCK_USER_DATE_FOUR, +} diff --git a/src/fixtures/yaml-fixtures.ts b/src/fixtures/yaml-fixtures.ts new file mode 100644 index 000000000..ac8c5cce5 --- /dev/null +++ b/src/fixtures/yaml-fixtures.ts @@ -0,0 +1,65 @@ +export const normalYamlString = `logo: /path-to/logo.png +links: + - title: TitleA + url: /title-a + - title: TitleB + url: /title-b + - title: TitleC + url: /title-c + sublinks: + - title: sublink-1 + url: /sublink-1 + - title: sublink-2 + url: /sublink-2 +` +export const maliciousYamlString = `logo: /path-to/logo.png +links: + - title: TitleA + url: /title-a + - title: TitleB + url: /title-b + - title: TitleC + url: /title-c + sublinks: + - title: sublink-1 + url: /sublink-1 + - title: sublink-2 + url: /sublink-2 +` + +export const normalYamlObject = { + logo: "/path-to/logo.png", + links: [ + { title: "TitleA", url: "/title-a" }, + { title: "TitleB", url: "/title-b" }, + { + title: "TitleC", + url: "/title-c", + sublinks: [ + { title: "sublink-1", url: "/sublink-1" }, + { title: "sublink-2", url: "/sublink-2" }, + ], + }, + ], +} +export const maliciousYamlObject = { + logo: "/path-to/logo.png", + links: [ + { title: "TitleA", url: "/title-a" }, + { title: "TitleB", url: "/title-b" }, + { + title: "TitleC", + url: "/title-c", + sublinks: [ + { + title: "sublink-1", + url: "/sublink-1", + }, + { + title: "sublink-2", + url: "/sublink-2", + }, + ], + }, + ], +} diff --git a/src/integration/NotificationOnEditHandler.spec.ts b/src/integration/NotificationOnEditHandler.spec.ts new file mode 100644 index 000000000..24d208925 --- /dev/null +++ b/src/integration/NotificationOnEditHandler.spec.ts @@ -0,0 +1,239 @@ +import express from "express" +import request from "supertest" + +import { NotificationOnEditHandler } from "@middleware/notificationOnEditHandler" + +import UserSessionData from "@classes/UserSessionData" + +import { + Notification, + Repo, + Reviewer, + ReviewMeta, + ReviewRequest, + ReviewRequestView, + Site, + SiteMember, + User, + Whitelist, +} from 
"@database/models" +import { generateRouterForUserWithSite } from "@fixtures/app" +import { + mockEmail, + mockIsomerUserId, + mockSiteName, +} from "@fixtures/sessionData" +import { GitHubService } from "@services/db/GitHubService" +import * as ReviewApi from "@services/db/review" +import { ConfigYmlService } from "@services/fileServices/YmlFileServices/ConfigYmlService" +import { getUsersService, notificationsService } from "@services/identity" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import SitesService from "@services/identity/SitesService" +import ReviewRequestService from "@services/review/ReviewRequestService" +import { sequelize } from "@tests/database" + +const mockSiteId = "1" +const mockSiteMemberId = "1" + +const mockGithubService = { + getPullRequest: jest.fn(), + getComments: jest.fn(), +} +const usersService = getUsersService(sequelize) +const reviewRequestService = new ReviewRequestService( + (mockGithubService as unknown) as typeof ReviewApi, + User, + ReviewRequest, + Reviewer, + ReviewMeta, + ReviewRequestView +) +const sitesService = new SitesService({ + siteRepository: Site, + gitHubService: (mockGithubService as unknown) as GitHubService, + configYmlService: (jest.fn() as unknown) as ConfigYmlService, + usersService, + isomerAdminsService: (jest.fn() as unknown) as IsomerAdminsService, + reviewRequestService, +}) +const collaboratorsService = new CollaboratorsService({ + siteRepository: Site, + siteMemberRepository: SiteMember, + sitesService, + usersService, + whitelist: Whitelist, +}) + +const notificationsHandler = new NotificationOnEditHandler({ + reviewRequestService, + collaboratorsService, + sitesService, + notificationsService, +}) + +// Set up express with defaults and use the router under test +const router = express() +const dummySubrouter = express() +dummySubrouter.get("/:siteName/test", async (req, res, next) => + // Dummy subrouter + next() +) +router.use(dummySubrouter) + +// We handle the test slightly differently - jest interprets the end of the test as when the response is sent, +// but we normally create a notification after this response, due to the position of the middleware +// the solution to get tests working is to send a response only after the notification middleware +router.use(async (req, res, next) => { + // Inserts notification handler after all other subrouters + // Needs to be awaited so jest doesn't exit prematurely upon receiving response status + await notificationsHandler.createNotification(req as any, res as any, next) + res.status(200).send(200) +}) +const userSessionData = new UserSessionData({ + isomerUserId: mockIsomerUserId, + email: mockEmail, +}) +const app = generateRouterForUserWithSite(router, userSessionData, mockSiteName) + +describe("Notifications Router", () => { + const mockAdditionalUserId = "2" + const mockAdditionalSiteId = "2" + const mockAdditionalSiteMemberId = "2" + const mockAnotherSiteMemberId = "3" + + beforeAll(async () => { + // Mock github service return + mockGithubService.getPullRequest.mockResolvedValue({ + title: "title", + body: "body", + changed_files: [], + created_at: new Date(), + }) + + // We need to force the relevant tables to start from a clean slate + // Otherwise, some tests may fail due to the auto-incrementing IDs + // not starting from 1 + await User.sync({ force: true }) + await Site.sync({ force: true }) + await Repo.sync({ force: true }) + await SiteMember.sync({ force: true 
}) + await Notification.sync({ force: true }) + await ReviewMeta.sync({ force: true }) + await ReviewRequest.sync({ force: true }) + + // Set up User and Site table entries + await User.create({ + id: mockIsomerUserId, + }) + await User.create({ + id: mockAdditionalUserId, + }) + await Site.create({ + id: mockSiteId, + name: mockSiteName, + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + await SiteMember.create({ + userId: mockIsomerUserId, + siteId: mockSiteId, + role: "ADMIN", + id: mockSiteMemberId, + }) + await Repo.create({ + name: mockSiteName, + url: "url", + siteId: mockSiteId, + }) + await SiteMember.create({ + userId: mockAdditionalUserId, + siteId: mockSiteId, + role: "ADMIN", + id: mockAdditionalSiteMemberId, + }) + await Site.create({ + id: mockAdditionalSiteId, + name: "mockSite2", + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + await SiteMember.create({ + userId: mockIsomerUserId, + siteId: mockAdditionalSiteId, + role: "ADMIN", + id: mockAnotherSiteMemberId, + }) + await Repo.create({ + name: `${mockSiteName}2`, + url: "url", + siteId: mockAdditionalSiteId, + }) + }) + + afterAll(async () => { + await SiteMember.sync({ force: true }) + await Site.sync({ force: true }) + await User.sync({ force: true }) + await Repo.sync({ force: true }) + }) + + describe("createNotification handler", () => { + afterEach(async () => { + // Clean up so that different tests using + // the same notifications don't interfere with each other + await Notification.sync({ force: true }) + await ReviewMeta.sync({ force: true }) + await ReviewRequest.sync({ force: true }) + }) + it("should create a new notification when called", async () => { + // Arrange + await ReviewRequest.create({ + id: 1, + requestorId: mockIsomerUserId, + siteId: mockSiteId, + reviewStatus: "OPEN", + }) + await ReviewMeta.create({ + reviewId: 1, + pullRequestNumber: 1, + reviewLink: "test", + }) + mockGithubService.getComments.mockResolvedValueOnce([]) + + // Act + await request(app).get(`/${mockSiteName}/test`) + + // Assert + // Notification should be sent to all site members other than the creator + expect( + ( + await Notification.findAll({ + where: { + userId: mockAdditionalUserId, + siteId: mockSiteId, + siteMemberId: mockAdditionalSiteMemberId, + firstReadTime: null, + }, + }) + ).length + ).toEqual(1) + expect( + ( + await Notification.findAll({ + where: { + userId: mockIsomerUserId, + siteId: mockSiteId, + siteMemberId: mockSiteMemberId, + firstReadTime: null, + }, + }) + ).length + ).toEqual(0) + }) + }) +}) diff --git a/src/integration/Notifications.spec.ts b/src/integration/Notifications.spec.ts new file mode 100644 index 000000000..79b822be5 --- /dev/null +++ b/src/integration/Notifications.spec.ts @@ -0,0 +1,504 @@ +import express from "express" +import request from "supertest" + +import { + Notification, + Repo, + Reviewer, + ReviewMeta, + ReviewRequest, + ReviewRequestView, + Site, + SiteMember, + User, + Whitelist, +} from "@database/models" +import { generateRouter, generateRouterForUserWithSite } from "@fixtures/app" +import UserSessionData from "@root/classes/UserSessionData" +import UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" +import { + formatNotification, + highPriorityOldReadNotification, + highPriorityReadNotification, + highPriorityUnreadNotification, + normalPriorityOldReadNotification, + normalPriorityReadNotification, + normalPriorityUnreadNotification, 
+} from "@root/fixtures/notifications" +import { + mockEmail, + mockIsomerUserId, + mockSiteName, +} from "@root/fixtures/sessionData" +import { getAuthorizationMiddleware } from "@root/middleware" +import { NotificationsRouter as _NotificationsRouter } from "@root/routes/v2/authenticated/notifications" +import { SitesRouter as _SitesRouter } from "@root/routes/v2/authenticated/sites" +import { genericGitHubAxiosInstance } from "@root/services/api/AxiosInstance" +import { GitHubService } from "@root/services/db/GitHubService" +import { ConfigYmlService } from "@root/services/fileServices/YmlFileServices/ConfigYmlService" +import CollaboratorsService from "@root/services/identity/CollaboratorsService" +import SitesService from "@root/services/identity/SitesService" +import ReviewRequestService from "@root/services/review/ReviewRequestService" +import * as ReviewApi from "@services/db/review" +import { + getIdentityAuthService, + getUsersService, + isomerAdminsService, + notificationsService, +} from "@services/identity" +import { sequelize } from "@tests/database" + +const MOCK_SITE = "mockSite" +const MOCK_SITE_ID = "1" +const MOCK_SITE_MEMBER_ID = "1" + +const gitHubService = new GitHubService({ + axiosInstance: genericGitHubAxiosInstance, +}) +const identityAuthService = getIdentityAuthService(gitHubService) +const usersService = getUsersService(sequelize) +const configYmlService = new ConfigYmlService({ gitHubService }) +const reviewRequestService = new ReviewRequestService( + (gitHubService as unknown) as typeof ReviewApi, + User, + ReviewRequest, + Reviewer, + ReviewMeta, + ReviewRequestView +) +const sitesService = new SitesService({ + siteRepository: Site, + gitHubService, + configYmlService, + usersService, + isomerAdminsService, + reviewRequestService, +}) +const collaboratorsService = new CollaboratorsService({ + siteRepository: Site, + siteMemberRepository: SiteMember, + sitesService, + usersService, + whitelist: Whitelist, +}) +const authorizationMiddleware = getAuthorizationMiddleware({ + identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, +}) +const notificationsRouter = new _NotificationsRouter({ + notificationsService, + authorizationMiddleware, +}) +const notificationsSubrouter = notificationsRouter.getRouter() + +// Set up express with defaults and use the router under test +const subrouter = express() + +subrouter.use("/:siteName", notificationsSubrouter) +const userSessionData = new UserSessionData({ + isomerUserId: mockIsomerUserId, + email: mockEmail, +}) +const app = generateRouterForUserWithSite(subrouter, userSessionData, MOCK_SITE) + +describe("Notifications Router", () => { + const MOCK_ADDITIONAL_USER_ID = "2" + const MOCK_ADDITIONAL_SITE_ID = "2" + const MOCK_ADDITIONAL_SITE_MEMBER_ID = "2" + const MOCK_ANOTHER_SITE_MEMBER_ID = "3" + + beforeAll(async () => { + // We need to force the relevant tables to start from a clean slate + // Otherwise, some tests may fail due to the auto-incrementing IDs + // not starting from 1 + await User.sync({ force: true }) + await Site.sync({ force: true }) + await Repo.sync({ force: true }) + await SiteMember.sync({ force: true }) + await Notification.sync({ force: true }) + + // Set up User and Site table entries + await User.create({ + id: mockIsomerUserId, + }) + await User.create({ + id: MOCK_ADDITIONAL_USER_ID, + }) + await Site.create({ + id: MOCK_SITE_ID, + name: mockSiteName, + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + 
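+    // Attach both users to the first site, then seed a second site with its
+    // own membership and repo so the specs can verify that notifications are
+    // scoped to the requesting user and site.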
await SiteMember.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + role: "ADMIN", + id: MOCK_SITE_MEMBER_ID, + }) + await Repo.create({ + name: MOCK_SITE, + url: "url", + siteId: MOCK_SITE_ID, + }) + await SiteMember.create({ + userId: MOCK_ADDITIONAL_USER_ID, + siteId: MOCK_SITE_ID, + role: "ADMIN", + id: MOCK_ADDITIONAL_SITE_MEMBER_ID, + }) + await Site.create({ + id: MOCK_ADDITIONAL_SITE_ID, + name: `${mockSiteName}2`, + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + await SiteMember.create({ + userId: mockIsomerUserId, + siteId: MOCK_ADDITIONAL_SITE_ID, + role: "ADMIN", + id: MOCK_ANOTHER_SITE_MEMBER_ID, + }) + await Repo.create({ + name: `${MOCK_SITE}2`, + url: "url", + siteId: MOCK_ADDITIONAL_SITE_ID, + }) + }) + + afterAll(async () => { + await Notification.sync({ force: true }) + await SiteMember.sync({ force: true }) + await Site.sync({ force: true }) + await User.sync({ force: true }) + await Repo.sync({ force: true }) + }) + + describe("GET /", () => { + afterEach(async () => { + // Clean up so that different tests using + // the same notifications don't interfere with each other + await Notification.sync({ force: true }) + }) + it("should return sorted list of most recent notifications if there are no unread", async () => { + // Arrange + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: MOCK_ADDITIONAL_USER_ID, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_ADDITIONAL_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_ADDITIONAL_SITE_ID, + siteMemberId: MOCK_ANOTHER_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + // Notifications with different user or site are not returned + const expected = [ + highPriorityReadNotification, + normalPriorityReadNotification, + normalPriorityReadNotification, + normalPriorityReadNotification, + highPriorityOldReadNotification, + normalPriorityOldReadNotification, + ].map((notification) => formatNotification(notification)) + + // Act + const actual = await request(app).get(`/${MOCK_SITE}`) + + // Assert + expect(actual.body).toMatchObject(expected) + }) + + it("should return only unread notifications if there are any", async () => { + // Arrange + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + 
siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...highPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...highPriorityUnreadNotification, + }) + await Notification.create({ + userId: MOCK_ADDITIONAL_USER_ID, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_ADDITIONAL_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_ADDITIONAL_SITE_ID, + siteMemberId: MOCK_ANOTHER_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + const expected = [ + highPriorityUnreadNotification, + highPriorityUnreadNotification, + normalPriorityUnreadNotification, + normalPriorityUnreadNotification, + normalPriorityUnreadNotification, + normalPriorityUnreadNotification, + ].map((notification) => formatNotification(notification)) + + // Act + const actual = await request(app).get(`/${MOCK_SITE}`) + + // Assert + expect(actual.body).toMatchObject(expected) + }) + }) + + describe("GET /allNotifications", () => { + afterEach(async () => { + // Clean up so that different tests using + // the same notifications don't interfere with each other + await Notification.sync({ force: true }) + }) + it("should return sorted list of all notifications", async () => { + // Arrange + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...highPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: MOCK_ADDITIONAL_USER_ID, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_ADDITIONAL_SITE_MEMBER_ID, + 
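+      // Seeded for a different user (and, in the next create, a different
+      // site): these must not surface in the unread list for mockIsomerUserId.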
...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_ADDITIONAL_SITE_ID, + siteMemberId: MOCK_ANOTHER_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + // Notifications with different user or site are not returned + const expected = [ + highPriorityUnreadNotification, + normalPriorityUnreadNotification, + highPriorityReadNotification, + normalPriorityReadNotification, + normalPriorityReadNotification, + highPriorityOldReadNotification, + normalPriorityOldReadNotification, + ].map((notification) => formatNotification(notification)) + + // Act + const actual = await request(app).get(`/${MOCK_SITE}/allNotifications`) + + // Assert + expect(actual.body).toMatchObject(expected) + }) + }) + + describe("POST /", () => { + afterEach(async () => { + // Clean up so that different tests using + // the same notifications don't interfere with each other + await Notification.sync({ force: true }) + }) + it("should mark all notifications from the user as read", async () => { + // Arrange + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_SITE_MEMBER_ID, + ...highPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityOldReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...highPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityReadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_SITE_ID, + siteMemberId: 1, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: MOCK_ADDITIONAL_USER_ID, + siteId: MOCK_SITE_ID, + siteMemberId: MOCK_ADDITIONAL_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + await Notification.create({ + userId: mockIsomerUserId, + siteId: MOCK_ADDITIONAL_SITE_ID, + siteMemberId: MOCK_ANOTHER_SITE_MEMBER_ID, + ...normalPriorityUnreadNotification, + }) + const expected = 200 + + // Act + const actual = await request(app).post(`/${MOCK_SITE}`).send({}) + + // Assert + expect(actual.statusCode).toBe(expected) + expect( + await Notification.findAll({ + where: { siteMemberId: 1, first_read_time: null }, + }) + ).toEqual([]) + }) + }) +}) diff --git a/src/integration/Reviews.spec.ts b/src/integration/Reviews.spec.ts new file mode 100644 index 000000000..5aa024117 --- /dev/null +++ b/src/integration/Reviews.spec.ts @@ -0,0 +1,1882 @@ +import express from "express" +import mockAxios from "jest-mock-axios" +import request from "supertest" + +import { ReviewsRouter as _ReviewsRouter } from "@routes/v2/authenticated/review" +import { SitesRouter as _SitesRouter } from "@routes/v2/authenticated/sites" + +import { + IsomerAdmin, + Notification, + Repo, + Reviewer, + ReviewMeta, + ReviewRequest, + ReviewRequestView, + Site, + SiteMember, + User, + Whitelist, +} from "@database/models" +import { generateRouterForUserWithSite } from "@fixtures/app" +import { + MOCK_GITHUB_COMMENT_BODY_ONE, + MOCK_GITHUB_COMMENT_BODY_TWO, + MOCK_GITHUB_COMMIT_ALPHA_ONE, + MOCK_GITHUB_COMMIT_ALPHA_THREE, + MOCK_GITHUB_COMMIT_ALPHA_TWO, + MOCK_GITHUB_COMMIT_DATE_ONE, + 
MOCK_GITHUB_COMMIT_DATE_THREE, + MOCK_GITHUB_FILENAME_ALPHA_ONE, + MOCK_GITHUB_FILENAME_ALPHA_TWO, + MOCK_GITHUB_FILEPATH_ALPHA_TWO, + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_ONE, + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_TWO, + MOCK_GITHUB_PULL_REQUEST_NUMBER, + MOCK_GITHUB_RAWCOMMENT_ONE, + MOCK_GITHUB_RAWCOMMENT_TWO, +} from "@fixtures/github" +import { MOCK_GITHUB_DATE_ONE } from "@fixtures/identity" +import { + MOCK_PULL_REQUEST_BODY_ONE, + MOCK_PULL_REQUEST_CHANGED_FILES_ONE, + MOCK_PULL_REQUEST_ONE, + MOCK_PULL_REQUEST_TITLE_ONE, +} from "@fixtures/review" +import { + MOCK_USER_SESSION_DATA_ONE, + MOCK_USER_SESSION_DATA_THREE, + MOCK_USER_SESSION_DATA_TWO, +} from "@fixtures/sessionData" +import { + MOCK_REPO_DBENTRY_ONE, + MOCK_SITEMEMBER_DBENTRY_ONE, + MOCK_SITEMEMBER_DBENTRY_TWO, + MOCK_SITE_DBENTRY_ONE, + MOCK_SITE_ID_ONE, + MOCK_REPO_NAME_ONE, + MOCK_REPO_NAME_TWO, + MOCK_SITE_ID_TWO, +} from "@fixtures/sites" +import { + MOCK_USER_DBENTRY_ONE, + MOCK_USER_DBENTRY_THREE, + MOCK_USER_DBENTRY_TWO, + MOCK_USER_EMAIL_ONE, + MOCK_USER_EMAIL_THREE, + MOCK_USER_EMAIL_TWO, + MOCK_USER_ID_ONE, + MOCK_USER_ID_TWO, +} from "@fixtures/users" +import { ReviewRequestStatus } from "@root/constants" +import { ReviewRequestDto } from "@root/types/dto/review" +import { GitHubService } from "@services/db/GitHubService" +import * as ReviewApi from "@services/db/review" +import { ConfigYmlService } from "@services/fileServices/YmlFileServices/ConfigYmlService" +import { getUsersService, notificationsService } from "@services/identity" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import SitesService from "@services/identity/SitesService" +import ReviewRequestService from "@services/review/ReviewRequestService" +import { sequelize } from "@tests/database" + +const gitHubService = new GitHubService({ axiosInstance: mockAxios.create() }) +const configYmlService = new ConfigYmlService({ gitHubService }) +const usersService = getUsersService(sequelize) +const isomerAdminsService = new IsomerAdminsService({ repository: IsomerAdmin }) +const reviewRequestService = new ReviewRequestService( + (gitHubService as unknown) as typeof ReviewApi, + User, + ReviewRequest, + Reviewer, + ReviewMeta, + ReviewRequestView +) +const sitesService = new SitesService({ + siteRepository: Site, + gitHubService, + configYmlService, + usersService, + isomerAdminsService, + reviewRequestService, +}) +const collaboratorsService = new CollaboratorsService({ + siteRepository: Site, + siteMemberRepository: SiteMember, + sitesService, + usersService, + whitelist: Whitelist, +}) + +const ReviewsRouter = new _ReviewsRouter( + reviewRequestService, + usersService, + sitesService, + collaboratorsService, + notificationsService +) +const reviewsSubrouter = ReviewsRouter.getRouter() +const subrouter = express() +subrouter.use("/:siteName", reviewsSubrouter) + +const mockGenericAxios = mockAxios.create() + +describe("Review Requests Integration Tests", () => { + beforeAll(async () => { + // NOTE: Because SitesService uses an axios instance, + // we need to mock the axios instance using es5 named exports + // to ensure that the calls for .get() on the instance + // will actually return a value and not fail. 
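+    // (jest.mock with a factory swaps the module's real exports for the object
+    // returned below; the __esModule flag marks the mock as an ES module so the
+    // named export genericGitHubAxiosInstance resolves to the mocked instance.)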
+ jest.mock("../services/api/AxiosInstance.ts", () => ({ + __esModule: true, // this property makes it work + genericGitHubAxiosInstance: mockGenericAxios, + })) + + // We need to force the relevant tables to start from a clean slate + // Otherwise, some tests may fail due to the auto-incrementing IDs + // not starting from 1 + await User.sync({ force: true }) + await Site.sync({ force: true }) + await Repo.sync({ force: true }) + await SiteMember.sync({ force: true }) + await Notification.sync({ force: true }) + await ReviewMeta.sync({ force: true }) + + await User.create(MOCK_USER_DBENTRY_ONE) + await User.create(MOCK_USER_DBENTRY_TWO) + await User.create(MOCK_USER_DBENTRY_THREE) + await Site.create(MOCK_SITE_DBENTRY_ONE) + await Repo.create(MOCK_REPO_DBENTRY_ONE) + await SiteMember.create(MOCK_SITEMEMBER_DBENTRY_ONE) + await SiteMember.create(MOCK_SITEMEMBER_DBENTRY_TWO) + }) + + afterAll(async () => { + await SiteMember.destroy({ + where: { + siteId: MOCK_SITE_ID_ONE, + }, + }) + await User.destroy({ + where: { + id: MOCK_USER_ID_ONE, + }, + }) + await User.destroy({ + where: { + id: MOCK_USER_ID_TWO, + }, + }) + await Repo.destroy({ + where: { + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Site.destroy({ + where: { + id: MOCK_SITE_ID_ONE, + }, + }) + }) + + describe("/compare", () => { + it("should get GitHub diff response for a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.get.mockResolvedValueOnce({ + data: { + files: [ + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_ONE, + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_TWO, + ], + commits: [ + MOCK_GITHUB_COMMIT_ALPHA_ONE, + MOCK_GITHUB_COMMIT_ALPHA_TWO, + MOCK_GITHUB_COMMIT_ALPHA_THREE, + ], + }, + }) + const expected = { + items: [ + { + type: ["page"], + name: MOCK_GITHUB_FILENAME_ALPHA_ONE, + path: [], + url: "www.google.com", + lastEditedBy: MOCK_USER_EMAIL_TWO, // TODO: This should be MOCK_USER_EMAIL_ONE + lastEditedTime: new Date(MOCK_GITHUB_COMMIT_DATE_THREE).getTime(), + }, + { + type: ["page"], + name: MOCK_GITHUB_FILENAME_ALPHA_TWO, + path: MOCK_GITHUB_FILEPATH_ALPHA_TWO.split("/").filter((x) => x), + url: "www.google.com", + lastEditedBy: MOCK_USER_EMAIL_TWO, + lastEditedTime: new Date(MOCK_GITHUB_COMMIT_DATE_THREE).getTime(), + }, + ], + } + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_ONE}/compare`) + + // Assert + expect(actual.statusCode).toEqual(200) + expect(actual.body).toMatchObject(expected) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_ONE}/compare`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/request", () => { + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should successfully create a review request when valid inputs are provided", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + const mockPullRequest = { + reviewers: [MOCK_USER_EMAIL_ONE], + title: "Fake title", + description: "Fake description", + } + const expected = { + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + } + 
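+      // Stub the GitHub call that opens the pull request so the router is handed
+      // MOCK_GITHUB_PULL_REQUEST_NUMBER without any real network request being made.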
mockGenericAxios.post.mockResolvedValueOnce({ + data: { + number: MOCK_GITHUB_PULL_REQUEST_NUMBER, + }, + }) + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/request`) + .send(mockPullRequest) + + // Assert + expect(actual.body).toMatchObject(expected) + expect(actual.statusCode).toEqual(200) + const actualReviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + const actualReviewer = await Reviewer.findOne({ + where: { + requestId: actualReviewRequest?.id, + reviewerId: MOCK_USER_ID_ONE, + }, + }) + const actualReviewMeta = await ReviewMeta.findOne({ + where: { + reviewId: actualReviewRequest?.id, + }, + }) + expect(actualReviewRequest).not.toBeNull() + expect(actualReviewer).not.toBeNull() + expect(actualReviewMeta).not.toBeNull() + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + const mockPullRequest = { + reviewers: [MOCK_USER_EMAIL_TWO], + title: "Fake title", + description: "Fake description", + } + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_TWO}/request`) + .send(mockPullRequest) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + const mockPullRequest = { + reviewers: [MOCK_USER_EMAIL_TWO], + title: "Fake title", + description: "Fake description", + } + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/request`) + .send(mockPullRequest) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 400 if no reviewers are provided", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + const mockPullRequest = { + reviewers: [], + title: "Fake title", + description: "Fake description", + } + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/request`) + .send(mockPullRequest) + + // Assert + expect(actual.statusCode).toEqual(400) + }) + + it("should return 400 if selected reviewers are not admins", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + const mockPullRequest = { + reviewers: [MOCK_USER_EMAIL_TWO], + title: "Fake title", + description: "Fake description", + } + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/request`) + .send(mockPullRequest) + + // Assert + expect(actual.statusCode).toEqual(400) + }) + }) + + describe("/summary", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + 
where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should get the summary of all existing review requests", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.get.mockResolvedValueOnce({ + data: MOCK_PULL_REQUEST_ONE, + }) + mockGenericAxios.get.mockResolvedValueOnce({ + data: [MOCK_GITHUB_RAWCOMMENT_ONE, MOCK_GITHUB_RAWCOMMENT_TWO], + }) + const expected = { + reviews: [ + { + id: String(MOCK_GITHUB_PULL_REQUEST_NUMBER), + author: MOCK_USER_EMAIL_ONE, + status: ReviewRequestStatus.Open, + title: MOCK_PULL_REQUEST_TITLE_ONE, + description: MOCK_PULL_REQUEST_BODY_ONE, + changedFiles: MOCK_PULL_REQUEST_CHANGED_FILES_ONE, + createdAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + newComments: 2, + firstView: true, + }, + ], + } + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_ONE}/summary`) + + // Assert + expect(actual.statusCode).toEqual(200) + expect(actual.body).toMatchObject(expected) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_TWO}/summary`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_ONE}/summary`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/viewed", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }) + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_TWO, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await ReviewRequestView.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should mark all existing review requests as viewed for the user", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Pre-requisite checks + const countViews = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countViews).toEqual(0) + const countAnotherUserViews = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countAnotherUserViews).toEqual(0) + + // Act + const actual = await request(app).post(`/${MOCK_REPO_NAME_ONE}/viewed`) + + // Assert + expect(actual.statusCode).toEqual(200) + const countViewsAfter = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countViewsAfter).toEqual(2) + const 
countAnotherUserViewsAfter = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countAnotherUserViewsAfter).toEqual(0) + const countTotalViewsAfter = await ReviewRequestView.count({ + where: {}, + }) + expect(countTotalViewsAfter).toEqual(2) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post(`/${MOCK_REPO_NAME_TWO}/viewed`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post(`/${MOCK_REPO_NAME_ONE}/viewed`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId GET", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should return the full details of a review request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.get.mockResolvedValueOnce({ + data: MOCK_PULL_REQUEST_ONE, + }) + mockGenericAxios.get.mockResolvedValueOnce({ + data: { + files: [ + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_ONE, + MOCK_GITHUB_FILE_CHANGE_INFO_ALPHA_TWO, + ], + commits: [ + MOCK_GITHUB_COMMIT_ALPHA_ONE, + MOCK_GITHUB_COMMIT_ALPHA_TWO, + MOCK_GITHUB_COMMIT_ALPHA_THREE, + ], + }, + }) + const expected: ReviewRequestDto = { + reviewUrl: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + title: MOCK_PULL_REQUEST_TITLE_ONE, + status: ReviewRequestStatus.Open, + requestor: MOCK_USER_EMAIL_ONE, + reviewers: [MOCK_USER_EMAIL_TWO], + reviewRequestedTime: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + changedItems: [ + { + type: ["page"], + name: MOCK_GITHUB_FILENAME_ALPHA_ONE, + path: [], + url: "www.google.com", + lastEditedBy: MOCK_USER_EMAIL_TWO, // TODO: This should be MOCK_USER_EMAIL_ONE + lastEditedTime: new Date(MOCK_GITHUB_COMMIT_DATE_THREE).getTime(), + }, + { + type: ["page"], + name: MOCK_GITHUB_FILENAME_ALPHA_TWO, + path: MOCK_GITHUB_FILEPATH_ALPHA_TWO.split("/").filter((x) => x), + url: "www.google.com", + lastEditedBy: MOCK_USER_EMAIL_TWO, + lastEditedTime: new Date(MOCK_GITHUB_COMMIT_DATE_THREE).getTime(), + }, + ], + } + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + expect(actual.body).toEqual({ reviewRequest: expected }) + }) + + it("should return 404 if site is not found", 
async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get(`/${MOCK_REPO_NAME_ONE}/123456`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId POST", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should update the review request successfully", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Pre-requisite checks + const reviewerCount = await Reviewer.count({ + where: {}, + }) + expect(reviewerCount).toEqual(0) + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`) + .send({ + reviewers: [MOCK_USER_EMAIL_ONE], + }) + + // Assert + expect(actual.statusCode).toEqual(200) + const reviewerCountAfter = await Reviewer.count({ + where: {}, + }) + expect(reviewerCountAfter).toEqual(1) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if the review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post(`/${MOCK_REPO_NAME_ONE}/123456`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 403 if user is not the original requestor", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(403) + }) + + it("should return 400 if provided 
reviewers are not admins of the site", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app) + .post(`/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`) + .send({ + reviewers: [MOCK_USER_EMAIL_THREE], + }) + + // Assert + expect(actual.statusCode).toEqual(400) + }) + }) + + describe("/:requestId DELETE", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewRequestView.create({ + reviewRequestId: reviewRequest?.id, + userId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequestView.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should close the review request successfully", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.patch.mockResolvedValueOnce(null) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if the review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post(`/${MOCK_REPO_NAME_ONE}/123456`) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 403 if user is not the original requestor", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(403) + }) + + it("should return 403 if the user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}` + ) + + // Assert + expect(actual.statusCode).toEqual(403) + }) + }) + + describe("/:requestId/viewed", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_TWO, + 
siteId: MOCK_SITE_ID_ONE, + }) + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_TWO, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await ReviewRequestView.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should mark the review request as viewed for the user", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Pre-requisite checks + const countViews = await ReviewRequestView.count({ + where: {}, + }) + expect(countViews).toEqual(0) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/viewed` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + const countViewsAfter = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countViewsAfter).toEqual(1) + const countAnotherUserViewsAfter = await ReviewRequestView.count({ + where: { + userId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(countAnotherUserViewsAfter).toEqual(0) + const countTotalViewsAfter = await ReviewRequestView.count({ + where: {}, + }) + expect(countTotalViewsAfter).toEqual(1) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/viewed` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/viewed` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/123456/viewed` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId/merge", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + await ReviewRequestView.create({ + reviewRequestId: reviewRequest?.id, + siteId: MOCK_SITE_ID_ONE, + userId: 
MOCK_USER_ID_ONE, + }) + await ReviewRequestView.create({ + reviewRequestId: reviewRequest?.id, + siteId: MOCK_SITE_ID_ONE, + userId: MOCK_USER_ID_TWO, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should merge the pull request successfully", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.post.mockResolvedValueOnce(null) + mockGenericAxios.put.mockResolvedValueOnce(null) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/merge` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(reviewRequest?.reviewStatus).toEqual(ReviewRequestStatus.Merged) + const countViews = await ReviewRequestView.count({ + where: {}, + }) + expect(countViews).toEqual(0) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/merge` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/merge` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/123456/merge` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId/approve POST", () => { + beforeEach(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterEach(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should allow the reviewer to approve the pull request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + 
siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(reviewRequest?.reviewStatus).toEqual(ReviewRequestStatus.Approved) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/123456/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 403 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + console.log(actual.error) + + // Assert + expect(actual.statusCode).toEqual(403) + }) + + it("should return 403 if site member is not a reviewer", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + console.log(actual.error) + + // Assert + expect(actual.statusCode).toEqual(403) + }) + }) + + describe("/:requestId/approve DELETE", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + reviewStatus: ReviewRequestStatus.Approved, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should allow the reviewer to unapprove the pull request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(reviewRequest?.reviewStatus).toEqual(ReviewRequestStatus.Open) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if 
review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_ONE}/123456/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if the user is not a reviewer of the RR", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_ONE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if the user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).delete( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/approve` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId/comments GET", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await Reviewer.create({ + requestId: reviewRequest?.id, + reviewerId: MOCK_USER_ID_TWO, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await Reviewer.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should retrieve the comments for the review request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.get.mockResolvedValueOnce({ + data: [MOCK_GITHUB_RAWCOMMENT_ONE, MOCK_GITHUB_RAWCOMMENT_TWO], + }) + const expected = [ + { + user: MOCK_USER_EMAIL_ONE, + message: MOCK_GITHUB_COMMENT_BODY_ONE, + createdAt: new Date(MOCK_GITHUB_COMMIT_DATE_ONE).getTime(), + isRead: false, + }, + { + user: MOCK_USER_EMAIL_TWO, + message: MOCK_GITHUB_COMMENT_BODY_TWO, + createdAt: new Date(MOCK_GITHUB_COMMIT_DATE_THREE).getTime(), + isRead: false, + }, + ] + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + expect(actual.body).toEqual(expected) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + 
expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).get( + `/${MOCK_REPO_NAME_ONE}/123456/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId/comments POST", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) + }) + + it("should create a new comment for a review request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + mockGenericAxios.post.mockResolvedValueOnce(null) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/123456/comments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) + + describe("/:requestId/comments/viewedComments", () => { + beforeAll(async () => { + await ReviewRequest.create({ + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }) + const reviewRequest = await ReviewRequest.findOne({ + where: { + requestorId: MOCK_USER_ID_ONE, + siteId: MOCK_SITE_ID_ONE, + }, + }) + await ReviewMeta.create({ + reviewId: reviewRequest?.id, + pullRequestNumber: MOCK_GITHUB_PULL_REQUEST_NUMBER, + reviewLink: `cms.isomer.gov.sg/sites/${MOCK_REPO_NAME_ONE}/review/${MOCK_GITHUB_PULL_REQUEST_NUMBER}`, + }) + + // Avoid race conditions when checking between expected and actual date values + jest.useFakeTimers("modern") + jest.setSystemTime(new Date(MOCK_GITHUB_COMMIT_DATE_ONE).getTime()) + }) + + afterAll(async () => { + await ReviewMeta.destroy({ + where: {}, + }) + await ReviewRequestView.destroy({ + where: {}, + }) + await ReviewRequest.destroy({ + where: {}, + }) 
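+      // Switch back to real timers; fake timers were enabled in beforeAll to pin
+      // the system time for the lastViewedAt assertions.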
+ jest.useRealTimers() + }) + + it("should update last viewed timestamp when the user views the review request", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Pre-requisite checks + const countViews = await ReviewRequestView.count({ + where: {}, + }) + expect(countViews).toEqual(0) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments/viewedComments` + ) + + // Assert + expect(actual.statusCode).toEqual(200) + const reviewRequestView = await ReviewRequestView.findOne({ + where: { + userId: MOCK_USER_ID_TWO, + siteId: MOCK_SITE_ID_ONE, + }, + }) + expect(reviewRequestView?.lastViewedAt).toEqual( + new Date(MOCK_GITHUB_COMMIT_DATE_ONE) + ) + }) + + it("should return 404 if site is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_TWO + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_TWO}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments/viewedComments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_THREE, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/${MOCK_GITHUB_PULL_REQUEST_NUMBER}/comments/viewedComments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + + it("should return 404 if review request is not found", async () => { + // Arrange + const app = generateRouterForUserWithSite( + subrouter, + MOCK_USER_SESSION_DATA_TWO, + MOCK_REPO_NAME_ONE + ) + + // Act + const actual = await request(app).post( + `/${MOCK_REPO_NAME_ONE}/123456/comments/viewedComments` + ) + + // Assert + expect(actual.statusCode).toEqual(404) + }) + }) +}) diff --git a/src/integration/Sites.spec.ts b/src/integration/Sites.spec.ts new file mode 100644 index 000000000..865881bdf --- /dev/null +++ b/src/integration/Sites.spec.ts @@ -0,0 +1,248 @@ +import express from "express" +import mockAxios from "jest-mock-axios" +import request from "supertest" + +import { + IsomerAdmin, + Repo, + Reviewer, + ReviewMeta, + ReviewRequest, + ReviewRequestView, + Site, + SiteMember, + User, + Whitelist, +} from "@database/models" +import { generateRouter } from "@fixtures/app" +import UserSessionData from "@root/classes/UserSessionData" +import { mockEmail, mockIsomerUserId } from "@root/fixtures/sessionData" +import { getAuthorizationMiddleware } from "@root/middleware" +import { SitesRouter as _SitesRouter } from "@root/routes/v2/authenticated/sites" +import { GitHubService } from "@root/services/db/GitHubService" +import { ConfigYmlService } from "@root/services/fileServices/YmlFileServices/ConfigYmlService" +import IsomerAdminsService from "@root/services/identity/IsomerAdminsService" +import SitesService from "@root/services/identity/SitesService" +import ReviewRequestService from "@root/services/review/ReviewRequestService" +import { getIdentityAuthService, getUsersService } from "@services/identity" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import { sequelize } from "@tests/database" + +const mockSite = "mockSite" +const mockSiteId = "1" +const mockAdminSite = "adminOnly" +const mockUpdatedAt = "now" +const mockPermissions = { push: true } +const 
mockPrivate = true + +const gitHubService = new GitHubService({ axiosInstance: mockAxios.create() }) +const configYmlService = new ConfigYmlService({ gitHubService }) +const usersService = getUsersService(sequelize) +const isomerAdminsService = new IsomerAdminsService({ repository: IsomerAdmin }) +const identityAuthService = getIdentityAuthService(gitHubService) +const reviewRequestService = new ReviewRequestService( + gitHubService, + User, + ReviewRequest, + Reviewer, + ReviewMeta, + ReviewRequestView +) +const sitesService = new SitesService({ + siteRepository: Site, + gitHubService, + configYmlService, + usersService, + isomerAdminsService, + reviewRequestService, +}) +const collaboratorsService = new CollaboratorsService({ + siteRepository: Site, + siteMemberRepository: SiteMember, + sitesService, + usersService, + whitelist: Whitelist, +}) + +const authorizationMiddleware = getAuthorizationMiddleware({ + identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, +}) + +const SitesRouter = new _SitesRouter({ sitesService, authorizationMiddleware }) +const sitesSubrouter = SitesRouter.getRouter() + +// Set up express with defaults and use the router under test +const subrouter = express() +// Certain properties are set on res.locals when the user signs in via GitHub. +// In order to do integration testing, we must expose a middleware +// that allows us to set these properties as well. +subrouter.use((req, res, next) => { + const userSessionData = new UserSessionData({ + isomerUserId: mockIsomerUserId, + email: mockEmail, + }) + res.locals.userSessionData = userSessionData + next() +}) +subrouter.use(sitesSubrouter) +const app = generateRouter(subrouter) + +const mockGenericAxios = mockAxios.create() +mockGenericAxios.get.mockResolvedValue({ + data: [], +}) + +describe("Sites Router", () => { + beforeAll(() => { + // NOTE: Because SitesService uses an axios instance, + // we need to mock the axios instance using es5 named exports + // to ensure that the calls for .get() on the instance + // will actually return a value and not fail.
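+    // (The module-level default of mockGenericAxios.get.mockResolvedValue({ data: [] })
+    // is overridden with mockResolvedValueOnce inside individual tests to simulate
+    // the repository listing returned by the GitHub API.)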
+ jest.mock("../services/api/AxiosInstance.ts", () => ({ + __esModule: true, // this property makes it work + genericGitHubAxiosInstance: mockGenericAxios, + })) + }) + + describe("/", () => { + beforeAll(async () => { + // We need to force the relevant tables to start from a clean slate + // Otherwise, some tests may fail due to the auto-incrementing IDs + // not starting from 1 + await User.sync({ force: true }) + await Site.sync({ force: true }) + await Repo.sync({ force: true }) + await SiteMember.sync({ force: true }) + + // Set up User and Site table entries + await User.create({ + id: mockIsomerUserId, + }) + await Site.create({ + id: mockSiteId, + name: mockSite, + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + await Site.create({ + id: "200", + name: mockAdminSite, + apiTokenName: "token", + jobStatus: "READY", + siteStatus: "LAUNCHED", + creatorId: mockIsomerUserId, + }) + await SiteMember.create({ + userId: mockIsomerUserId, + siteId: mockSiteId, + role: "ADMIN", + }) + await Repo.create({ + name: mockSite, + url: "url", + siteId: mockSiteId, + }) + }) + afterEach(async () => { + // Clean up so that different tests using + // the same mock user don't interfere with each other + await IsomerAdmin.destroy({ + where: { userId: mockIsomerUserId }, + }) + }) + + afterAll(async () => { + await IsomerAdmin.sync({ force: true }) + await SiteMember.sync({ force: true }) + await Site.sync({ force: true }) + await User.sync({ force: true }) + await Repo.sync({ force: true }) + }) + + it("should return list of only sites available to email user", async () => { + // Arrange + const expected = { + siteNames: [ + { + lastUpdated: mockUpdatedAt, + repoName: mockSite, + isPrivate: mockPrivate, + permissions: mockPermissions, + }, + ], + } + + mockGenericAxios.get.mockResolvedValueOnce({ + data: [ + { + pushed_at: mockUpdatedAt, + permissions: mockPermissions, + name: mockSite, + private: mockPrivate, + }, + { + pushed_at: mockUpdatedAt, + permissions: mockPermissions, + name: mockAdminSite, + private: mockPrivate, + }, + ], + }) + + // Act + const actual = await request(app).get("/") + + // Assert + expect(actual.body).toMatchObject(expected) + }) + it("should return list of all sites available for admin", async () => { + // Arrange + await IsomerAdmin.create({ + userId: mockIsomerUserId, + }) + const expected = { + siteNames: [ + { + lastUpdated: mockUpdatedAt, + repoName: mockSite, + isPrivate: mockPrivate, + permissions: mockPermissions, + }, + { + lastUpdated: mockUpdatedAt, + repoName: mockAdminSite, + isPrivate: mockPrivate, + permissions: mockPermissions, + }, + ], + } + mockGenericAxios.get.mockResolvedValueOnce({ + data: [ + { + pushed_at: mockUpdatedAt, + permissions: mockPermissions, + name: mockSite, + private: mockPrivate, + }, + { + pushed_at: mockUpdatedAt, + permissions: mockPermissions, + name: mockAdminSite, + private: mockPrivate, + }, + ], + }) + + // Act + const actual = await request(app).get("/") + + // Assert + expect(actual.body).toMatchObject(expected) + }) + }) +}) diff --git a/src/integration/Users.spec.ts b/src/integration/Users.spec.ts index 4a0a0dca0..00efa1698 100644 --- a/src/integration/Users.spec.ts +++ b/src/integration/Users.spec.ts @@ -2,8 +2,12 @@ import express from "express" import mockAxios from "jest-mock-axios" import request from "supertest" -import { User, Whitelist } from "@database/models" +import { config } from "@config/config" + +import { User, Whitelist, Otp } from "@database/models" 
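+// Otp is imported so that the new OTP attempt-limit tests can assert on the
+// number of attempts recorded against an email address.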
import { generateRouter } from "@fixtures/app" +import UserSessionData from "@root/classes/UserSessionData" +import { mockIsomerUserId } from "@root/fixtures/sessionData" import { UsersRouter as _UsersRouter } from "@root/routes/v2/authenticated/users" import { getUsersService } from "@services/identity" import { sequelize } from "@tests/database" @@ -20,6 +24,8 @@ const mockWhitelistedDomain = ".gov.sg" const mockGithubId = "i m a git" const mockValidNumber = "92341234" const mockInvalidNumber = "00000000" +const maxNumOfOtpAttempts = config.get("auth.maxNumOtpAttempts") +const mockInvalidOtp = "000000" const UsersService = getUsersService(sequelize) @@ -32,7 +38,12 @@ const subrouter = express() // In order to do integration testing, we must expose a middleware // that allows us to set this properties also subrouter.use((req, res, next) => { - res.locals.userId = req.body.userId + const userSessionData = new UserSessionData({ + isomerUserId: req.body.userId, + githubId: req.body.githubId, + email: req.body.email, + }) + res.locals.userSessionData = userSessionData next() }) subrouter.use(usersSubrouter) @@ -48,11 +59,24 @@ const extractMobileOtp = (mobileBody: string): string => mobileBody.slice(12, 12 + 6) describe("Users Router", () => { + beforeAll(async () => { + // We need to force the relevant tables to start from a clean slate + // Otherwise, some tests may fail due to the auto-incrementing IDs + // not starting from 1 + await User.sync({ force: true }) + await Whitelist.sync({ force: true }) + }) + afterEach(() => { jest.resetAllMocks() mockAxios.reset() }) + afterAll(async () => { + await User.sync({ force: true }) + await Whitelist.sync({ force: true }) + }) + describe("/email/otp", () => { afterEach(async () => { // Clean up so that different tests using @@ -130,12 +154,15 @@ describe("Users Router", () => { // Clean up so that different tests using // the same mock user don't interfere with each other await User.destroy({ - where: { githubId: mockGithubId }, + where: { id: mockIsomerUserId }, force: true, // hard delete user record to prevent the unique constraint from being violated }) await Whitelist.destroy({ where: { email: mockWhitelistedDomain }, }) + await Otp.destroy({ + where: { email: mockValidEmail }, + }) }) it("should return 200 when the otp is correct", async () => { @@ -146,8 +173,10 @@ describe("Users Router", () => { otp = extractEmailOtp(email.body) return email }) - await User.create({ githubId: mockGithubId }) + + await User.create({ id: mockIsomerUserId }) await Whitelist.create({ email: mockWhitelistedDomain }) + await request(app).post("/email/otp").send({ email: mockValidEmail, }) @@ -156,11 +185,11 @@ describe("Users Router", () => { const actual = await request(app).post("/email/verifyOtp").send({ email: mockValidEmail, otp, - userId: mockGithubId, + userId: mockIsomerUserId, }) const updatedUser = await User.findOne({ where: { - githubId: mockGithubId, + id: mockIsomerUserId, }, }) @@ -172,9 +201,10 @@ describe("Users Router", () => { it("should return 400 when the otp is wrong", async () => { // Arrange const expected = 400 - const wrongOtp = 123456 + const wrongOtp = "123456" mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, }) @@ -183,7 +213,7 @@ describe("Users Router", () => { const actual = await 
request(app).post("/email/verifyOtp").send({ email: mockValidEmail, otp: wrongOtp, - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert @@ -194,7 +224,8 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, }) @@ -203,7 +234,7 @@ describe("Users Router", () => { const actual = await request(app).post("/email/verifyOtp").send({ email: mockValidEmail, otp: "", - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert @@ -214,7 +245,8 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, }) @@ -223,12 +255,127 @@ describe("Users Router", () => { const actual = await request(app).post("/email/verifyOtp").send({ email: mockValidEmail, otp: undefined, - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert expect(actual.statusCode).toBe(expected) }) + + it("should only ensure the latest email otp is valid", async () => { + // Arrange + const expected = 200 + let otp + mockAxios.post.mockImplementation((_: any, email: any) => { + otp = extractEmailOtp(email.body) + return email + }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + // Act + const actual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp, + userId: mockIsomerUserId, + }) + const oldOtp = otp + + // Assert + expect(actual.statusCode).toBe(expected) + + // Arrange + const newExpected = 400 + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const newActual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: oldOtp, + userId: mockIsomerUserId, + }) + + // Assert + expect(oldOtp).not.toBe(otp) + expect(newActual.statusCode).toBe(newExpected) + }) + + it("should return 400 when max number of email otp attempts is reached with correct error message", async () => { + // Arrange + const expected = 400 + mockAxios.post.mockResolvedValue(200) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + const actual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + const otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(actual.statusCode).toBe(expected) + + if (i <= maxNumOfOtpAttempts) { + expect(otpEntry?.attempts).toBe(i) + expect(actual.body.error.message).toBe("OTP is not valid") + } else { + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + expect(actual.body.error.message).toBe( + "Max number of attempts reached" + ) + } + } + }) + + it("should reset otp attempts when new email otp is requested", async () => { + // Arrange + mockAxios.post.mockResolvedValue(200) + await 
Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + } + + let otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + + // Request for new otp and ensure attempts are reset + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(0) + }) }) describe("/mobile/otp", () => { @@ -287,7 +434,7 @@ describe("Users Router", () => { // Clean up so that different tests using // the same mock user don't interfere with each other await User.destroy({ - where: { githubId: mockGithubId }, + where: { id: mockIsomerUserId }, force: true, // hard delete user record to prevent the unique constraint from being violated }) }) @@ -300,7 +447,7 @@ describe("Users Router", () => { otp = extractMobileOtp(sms.body) return sms }) - await User.create({ githubId: mockGithubId }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/mobile/otp").send({ mobile: mockValidNumber, }) @@ -309,11 +456,11 @@ describe("Users Router", () => { const actual = await request(app).post("/mobile/verifyOtp").send({ mobile: mockValidNumber, otp, - userId: mockGithubId, + userId: mockIsomerUserId, }) const updatedUser = await User.findOne({ where: { - githubId: mockGithubId, + id: mockIsomerUserId, }, }) @@ -325,9 +472,9 @@ describe("Users Router", () => { it("should return 400 when the otp is wrong", async () => { // Arrange const expected = 400 - const wrongOtp = 123456 + const wrongOtp = "123456" mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/mobile/otp").send({ mobile: mockValidNumber, }) @@ -336,7 +483,7 @@ describe("Users Router", () => { const actual = await request(app).post("/mobile/verifyOtp").send({ mobile: mockValidNumber, otp: wrongOtp, - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert @@ -347,7 +494,7 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/mobile/otp").send({ mobile: mockValidNumber, }) @@ -356,7 +503,7 @@ describe("Users Router", () => { const actual = await request(app).post("/mobile/verifyOtp").send({ mobile: mockValidNumber, otp: "", - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert @@ -367,7 +514,7 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) - await User.create({ githubId: mockGithubId }) + await User.create({ id: mockIsomerUserId }) await request(app).post("/mobile/otp").send({ mobile: mockValidNumber, }) @@ -376,11 +523,124 @@ describe("Users Router", () => { const actual = await request(app).post("/mobile/verifyOtp").send({ mobile: mockValidNumber, otp: undefined, - userId: mockGithubId, + userId: mockIsomerUserId, }) // Assert expect(actual.statusCode).toBe(expected) }) + + it("should only ensure the latest 
mobile otp is valid", async () => { + // Arrange + const expected = 200 + let otp + mockAxios.post.mockImplementation((_: any, sms: any) => { + otp = extractMobileOtp(sms.body) + return sms + }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + // Act + const actual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp, + userId: mockIsomerUserId, + }) + const oldOtp = otp + + // Assert + expect(actual.statusCode).toBe(expected) + + // Arrange + const newExpected = 400 + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const newActual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: oldOtp, + userId: mockIsomerUserId, + }) + + // Assert + expect(oldOtp).not.toBe(otp) + expect(newActual.statusCode).toBe(newExpected) + }) + + it("should return 400 when max number of mobile otp attempts is reached with correct error message", async () => { + // Arrange + const expected = 400 + mockAxios.post.mockResolvedValueOnce(200) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + const actual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + const otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(actual.statusCode).toBe(expected) + + if (i <= maxNumOfOtpAttempts) { + expect(otpEntry?.attempts).toBe(i) + expect(actual.body.error.message).toBe("OTP is not valid") + } else { + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + expect(actual.body.error.message).toBe( + "Max number of attempts reached" + ) + } + } + }) + + it("should reset otp attempts when new mobile otp is requested", async () => { + // Arrange + mockAxios.post.mockResolvedValue(200) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + } + + let otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + + // Request for new otp and ensure attempts are reset + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(0) + }) }) }) diff --git a/src/logger/logger.js b/src/logger/logger.js index 99b0e50d2..015593856 100644 --- a/src/logger/logger.js +++ b/src/logger/logger.js @@ -1,5 +1,6 @@ /* eslint-disable no-console */ // Imports + const AWS = require("aws-sdk") const Bluebird = require("bluebird") const moment = require("moment-timezone") @@ -7,8 +8,10 @@ const moment = require("moment-timezone") const winston = require("winston") const WinstonCloudwatch = require("winston-cloudwatch") +const { config } = require("@config/config") + // Env vars -const { NODE_ENV } = process.env +const NODE_ENV = config.get("env") 
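A note on the config migration seen in these hunks: direct process.env reads are replaced with config.get(...) calls, for example config.get("env") in the line above, and elsewhere config.get("auth.maxNumOtpAttempts"), config.get("github.clientId") and config.get("sites.pageCount"). The "@config/config" module itself is not included in this patch. As a rough sketch only, assuming a convict-style schema (key names are taken from the calls in these hunks; defaults, and any env-var names not visible in the removed lines, are guesses), it could look like:

    // Hypothetical sketch of a convict-style "@config/config" module; not part of this patch.
    import convict from "convict"

    export const config = convict({
      env: {
        doc: "Runtime environment",
        format: ["dev", "staging", "test", "prod"], // "prod" is assumed; this hunk only checks dev/staging/test
        default: "dev",
        env: "NODE_ENV",
      },
      auth: {
        maxNumOtpAttempts: {
          doc: "Maximum OTP verification attempts before lockout",
          format: "nat",
          default: 5, // assumed default
          env: "MAX_NUM_OTP_ATTEMPTS", // assumed env var name
        },
      },
      github: {
        clientId: { doc: "GitHub OAuth client ID", format: String, default: "", env: "CLIENT_ID" },
        orgName: { doc: "GitHub organisation name", format: String, default: "isomerpages", env: "GITHUB_ORG_NAME" },
      },
    })

    config.validate({ allowed: "strict" })

Usage then mirrors the calls in this patch, e.g. const NODE_ENV = config.get("env") and const maxNumOfOtpAttempts = config.get("auth.maxNumOtpAttempts").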
// AWS const AWS_REGION_NAME = "ap-southeast-1" @@ -19,9 +22,10 @@ const metadataRequest = Bluebird.promisify( ) // Constants +// TODO: Check this env var as it is not in example const LOG_GROUP_NAME = `${process.env.AWS_BACKEND_EB_ENV_NAME}/nodejs.log` const IS_PROD_ENV = - NODE_ENV !== "LOCAL_DEV" && NODE_ENV !== "DEV" && NODE_ENV !== "test" + NODE_ENV !== "dev" && NODE_ENV !== "staging" && NODE_ENV !== "test" const IS_TEST_ENV = NODE_ENV === "test" function timestampGenerator() { diff --git a/src/middleware/__tests__/authorization.ts b/src/middleware/__tests__/authorization.ts new file mode 100644 index 000000000..14f254f11 --- /dev/null +++ b/src/middleware/__tests__/authorization.ts @@ -0,0 +1,111 @@ +import { NextFunction, Request, Response } from "express" + +import { ForbiddenError } from "@errors/ForbiddenError" + +import { AuthorizationMiddleware } from "@middleware/authorization" + +import UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" +import AuthorizationMiddlewareService from "@root/services/middlewareServices/AuthorizationMiddlewareService" + +describe("Authorization middleware", () => { + const TEST_SITE_NAME = "sitename" + const TEST_ISOMER_USER_ID = "1" + const mockAuthorizationMiddlewareService = { + checkIsSiteAdmin: jest.fn(), + checkIsSiteMember: jest.fn(), + } + const mockReq = ({ + params: { siteName: TEST_SITE_NAME }, + } as unknown) as Request< + never, + unknown, + unknown, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > + const mockRes = ({ + locals: { + sessionData: { getIsomerUserId: jest.fn(() => TEST_ISOMER_USER_ID) }, + }, + } as unknown) as Response< + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > + const mockNext = jest.fn() as NextFunction + + const authorizationMiddleware = new AuthorizationMiddleware({ + authorizationMiddlewareService: (mockAuthorizationMiddlewareService as unknown) as AuthorizationMiddlewareService, + }) + + beforeEach(() => { + jest.clearAllMocks() + }) + + describe("verifySiteAdmin", () => { + it("correctly verifies that user is a site admin if no error is thrown in the authorization middleware service", async () => { + // Arrange + mockAuthorizationMiddlewareService.checkIsSiteAdmin.mockResolvedValue( + undefined + ) + + // Act + await authorizationMiddleware.verifySiteAdmin(mockReq, mockRes, mockNext) + + // Assert + expect( + mockAuthorizationMiddlewareService.checkIsSiteAdmin + ).toHaveBeenCalled() + expect(mockNext).toHaveBeenCalledWith() + }) + + it("correctly verifies that user is not site admin if an error is thrown in the authorization middleware service", async () => { + // Arrange + mockAuthorizationMiddlewareService.checkIsSiteAdmin.mockResolvedValue( + new ForbiddenError() + ) + + // Act + await authorizationMiddleware.verifySiteAdmin(mockReq, mockRes, mockNext) + + // Assert + expect( + mockAuthorizationMiddlewareService.checkIsSiteAdmin + ).toHaveBeenCalled() + expect(mockNext).toHaveBeenCalledWith(new ForbiddenError()) + }) + }) + + describe("verifySiteMember", () => { + it("correctly verifies that user is a site member if no error is thrown in the authorization middleware service", async () => { + // Arrange + mockAuthorizationMiddlewareService.checkIsSiteMember.mockResolvedValue( + undefined + ) + + // Act + await authorizationMiddleware.verifySiteMember(mockReq, mockRes, mockNext) + + // Assert + expect( + mockAuthorizationMiddlewareService.checkIsSiteMember + ).toHaveBeenCalled() + expect(mockNext).toHaveBeenCalledWith() + }) + + it("correctly 
verifies that user is not site member if an error is thrown in the authorization middleware service", async () => { + // Arrange + mockAuthorizationMiddlewareService.checkIsSiteMember.mockResolvedValue( + new ForbiddenError() + ) + + // Act + await authorizationMiddleware.verifySiteMember(mockReq, mockRes, mockNext) + + // Assert + expect( + mockAuthorizationMiddlewareService.checkIsSiteMember + ).toHaveBeenCalled() + expect(mockNext).toHaveBeenCalledWith(new ForbiddenError()) + }) + }) +}) diff --git a/src/middleware/apiLogger.js b/src/middleware/apiLogger.js index 698f577e0..0264859ee 100644 --- a/src/middleware/apiLogger.js +++ b/src/middleware/apiLogger.js @@ -15,10 +15,11 @@ apiLogger.use((req, res, next) => { const ipAddress = req.headers["x-forwarded-for"] // Get user GitHub id - let userId - if (res.locals.userId) userId = res.locals.userId + const userEmail = res.locals.sessionData + ? res.locals.sessionData.email + : "(not logged in)" - let logMessage = `User ${userId} from IP address ${ + let logMessage = `User ${userEmail} from IP address ${ ipAddress ? `(IP: ${ipAddress})` : undefined } called ${req.method} on ${req.path}` if (!isObjEmpty(req.query)) { diff --git a/src/middleware/auth.js b/src/middleware/auth.js deleted file mode 100644 index 356702f0d..000000000 --- a/src/middleware/auth.js +++ /dev/null @@ -1,49 +0,0 @@ -const autoBind = require("auto-bind") - -class AuthMiddleware { - constructor({ authMiddlewareService }) { - this.authMiddlewareService = authMiddlewareService - // We need to bind all methods because we don't invoke them from the class directly - autoBind(this) - } - - verifyJwt(req, res, next) { - const { cookies, originalUrl: url } = req - const { accessToken, userId } = this.authMiddlewareService.verifyJwt({ - cookies, - url, - }) - res.locals.accessToken = accessToken - res.locals.userId = userId - return next() - } - - whoamiAuth(req, res, next) { - const { cookies, originalUrl: url } = req - const { accessToken, userId } = this.authMiddlewareService.whoamiAuth({ - cookies, - url, - }) - res.locals.accessToken = accessToken - if (userId) res.locals.userId = userId - return next() - } - - // Replace access token with site access token if it is available - async useSiteAccessTokenIfAvailable(req, res, next) { - const { - params: { siteName }, - } = req - const { userId, accessToken: userAccessToken } = res.locals - - const siteAccessToken = await this.authMiddlewareService.retrieveSiteAccessTokenIfAvailable( - { siteName, userAccessToken, userId } - ) - - if (siteAccessToken) res.locals.accessToken = siteAccessToken - - return next() - } -} - -export { AuthMiddleware } diff --git a/src/middleware/authentication.ts b/src/middleware/authentication.ts new file mode 100644 index 000000000..0d68cdb18 --- /dev/null +++ b/src/middleware/authentication.ts @@ -0,0 +1,47 @@ +import autoBind from "auto-bind" +import { NextFunction, Request, Response } from "express" +import { Session } from "express-session" + +import UserSessionData from "@root/classes/UserSessionData" +import AuthenticationMiddlewareService from "@root/services/middlewareServices/AuthenticationMiddlewareService" +import { SessionData } from "@root/types/express/session" + +interface RequestWithSession extends Request { + session: Session & SessionData +} + +export class AuthenticationMiddleware { + private readonly authenticationMiddlewareService: AuthenticationMiddlewareService + + constructor({ + authenticationMiddlewareService, + }: { + authenticationMiddlewareService: 
AuthenticationMiddlewareService + }) { + this.authenticationMiddlewareService = authenticationMiddlewareService + // We need to bind all methods because we don't invoke them from the class directly + autoBind(this) + } + + verifyAccess(req: RequestWithSession, res: Response, next: NextFunction) { + const { cookies, originalUrl: url, session } = req + const { + accessToken, + githubId, + isomerUserId, + email, + } = this.authenticationMiddlewareService.verifyAccess({ + cookies, + url, + userInfo: session.userInfo, + }) + const userSessionData = new UserSessionData({ + accessToken, + githubId, + isomerUserId, + email, + }) + res.locals.userSessionData = userSessionData + return next() + } +} diff --git a/src/middleware/authorization.ts b/src/middleware/authorization.ts new file mode 100644 index 000000000..1bfbdf507 --- /dev/null +++ b/src/middleware/authorization.ts @@ -0,0 +1,79 @@ +import autoBind from "auto-bind" + +import { ForbiddenError } from "@errors/ForbiddenError" + +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { RequestHandler } from "@root/types" +import AuthorizationMiddlewareService from "@services/middlewareServices/AuthorizationMiddlewareService" + +export class AuthorizationMiddleware { + private readonly authorizationMiddlewareService: AuthorizationMiddlewareService + + constructor({ + authorizationMiddlewareService, + }: { + authorizationMiddlewareService: AuthorizationMiddlewareService + }) { + this.authorizationMiddlewareService = authorizationMiddlewareService + // We need to bind all methods because we don't invoke them from the class directly + autoBind(this) + } + + // Check whether a user is using email login or github login + verifyIsEmailUser: RequestHandler< + never, + unknown, + unknown, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res, next) => { + const { userWithSiteSessionData } = res.locals + if (!userWithSiteSessionData.isEmailUser()) return next("router") + return next() + } + + // Check whether a user is a site admin + verifySiteAdmin: RequestHandler< + never, + unknown, + unknown, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res, next) => { + const { userWithSiteSessionData } = res.locals + + try { + const result = await this.authorizationMiddlewareService.checkIsSiteAdmin( + userWithSiteSessionData + ) + if (result instanceof ForbiddenError) return next(new ForbiddenError()) + + return next() + } catch (err) { + return next(err) + } + } + + // Check whether a user is a site member + verifySiteMember: RequestHandler< + never, + unknown, + unknown, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res, next) => { + const { userWithSiteSessionData } = res.locals + + try { + const result = await this.authorizationMiddlewareService.checkIsSiteMember( + userWithSiteSessionData + ) + if (result instanceof ForbiddenError) return next(new ForbiddenError()) + + return next() + } catch (err) { + return next(err) + } + } +} diff --git a/src/middleware/index.ts b/src/middleware/index.ts index 68e44e20f..3138e3c05 100644 --- a/src/middleware/index.ts +++ b/src/middleware/index.ts @@ -1,22 +1,50 @@ import FormSG from "@opengovsg/formsg-sdk" -import express, { RequestHandler } from "express" +import express, { RequestHandler as ExpressRequestHandler } from "express" -import AuthService from "@services/identity/AuthService" -import { AuthMiddlewareService } from "@services/middlewareServices/AuthMiddlewareService" +import { 
AuthenticationMiddleware } from "@middleware/authentication" +import { AuthorizationMiddleware } from "@middleware/authorization" + +import UserSessionData from "@classes/UserSessionData" +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import CollaboratorsService from "@root/services/identity/CollaboratorsService" +import { RequestHandler } from "@root/types" +import IdentityAuthService from "@services/identity/AuthService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import UsersService from "@services/identity/UsersService" +import AuthenticationMiddlewareService from "@services/middlewareServices/AuthenticationMiddlewareService" +import AuthorizationMiddlewareService from "@services/middlewareServices/AuthorizationMiddlewareService" import FormsProcessingService from "@services/middlewareServices/FormsProcessingService" -import { AuthMiddleware } from "./auth" +const getAuthenticationMiddleware = () => { + const authenticationMiddlewareService = new AuthenticationMiddlewareService() + const authenticationMiddleware = new AuthenticationMiddleware({ + authenticationMiddlewareService, + }) + return authenticationMiddleware +} -const getAuthMiddleware = ({ +const getAuthorizationMiddleware = ({ identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, }: { - identityAuthService: AuthService + identityAuthService: IdentityAuthService + usersService: UsersService + isomerAdminsService: IsomerAdminsService + collaboratorsService: CollaboratorsService }) => { - const authMiddlewareService = new AuthMiddlewareService({ + const authorizationMiddlewareService = new AuthorizationMiddlewareService({ identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, + }) + const authorizationMiddleware = new AuthorizationMiddleware({ + authorizationMiddlewareService, }) - const authMiddleware = new AuthMiddleware({ authMiddlewareService }) - return authMiddleware + return authorizationMiddleware } const formsg = FormSG() @@ -34,10 +62,37 @@ const formSGService = new FormsProcessingService({ formsg }) * * Retrieve form data from res.locals.submission. 
*/ -const attachFormSGHandler = (formKey: string): RequestHandler[] => [ +const attachFormSGHandler = (formKey: string): ExpressRequestHandler[] => [ formSGService.authenticate(), express.json(), formSGService.decrypt({ formKey }), ] -export { getAuthMiddleware, attachFormSGHandler } +const attachSiteHandler: RequestHandler< + Record, + unknown, + unknown, + never, + { + userSessionData: UserSessionData + userWithSiteSessionData: UserWithSiteSessionData + } +> = (req, res, next) => { + const { + params: { siteName }, + } = req + const { userSessionData } = res.locals + const userWithSiteSessionData = new UserWithSiteSessionData({ + ...userSessionData.getGithubParams(), + siteName, + }) + res.locals.userWithSiteSessionData = userWithSiteSessionData + return next() +} + +export { + getAuthenticationMiddleware, + getAuthorizationMiddleware, + attachFormSGHandler, + attachSiteHandler, +} diff --git a/src/middleware/notificationOnEditHandler.ts b/src/middleware/notificationOnEditHandler.ts new file mode 100644 index 000000000..1eb1b9ef2 --- /dev/null +++ b/src/middleware/notificationOnEditHandler.ts @@ -0,0 +1,78 @@ +import autoBind from "auto-bind" + +import UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" +import { SiteMember, User } from "@root/database/models" +import CollaboratorsService from "@root/services/identity/CollaboratorsService" +import NotificationsService from "@root/services/identity/NotificationsService" +import SitesService from "@root/services/identity/SitesService" +import ReviewRequestService from "@root/services/review/ReviewRequestService" +import { RequestHandler } from "@root/types" + +export class NotificationOnEditHandler { + private readonly reviewRequestService: ReviewRequestService + + private readonly sitesService: SitesService + + private readonly collaboratorsService: CollaboratorsService + + private readonly notificationsService: NotificationsService + + constructor({ + reviewRequestService, + sitesService, + collaboratorsService, + notificationsService, + }: { + reviewRequestService: ReviewRequestService + sitesService: SitesService + collaboratorsService: CollaboratorsService + notificationsService: NotificationsService + }) { + this.reviewRequestService = reviewRequestService + this.sitesService = sitesService + this.collaboratorsService = collaboratorsService + this.notificationsService = notificationsService + // We need to bind all methods because we don't invoke them from the class directly + autoBind(this) + } + + /** + * Creates a notification. 
Requires attachSiteHandler as a precondition + */ + createNotification: RequestHandler< + never, + unknown, + unknown, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res, next) => { + const { userWithSiteSessionData } = res.locals + + if (!userWithSiteSessionData.isEmailUser()) return + + const { siteName, isomerUserId: userId, email } = userWithSiteSessionData + const site = await this.sitesService.getBySiteName(siteName) + const users = await this.collaboratorsService.list(siteName, userId) + if (!site) throw new Error("Site should always exist") + const reviewRequests = await this.reviewRequestService.listReviewRequest( + userWithSiteSessionData, + site + ) + if (reviewRequests.length === 0) return + // For now, we only have 1 active review request + const reviewRequest = reviewRequests[0] + + await Promise.all( + users.map(async (user: User & { SiteMember: SiteMember }) => { + if (user.id.toString() === userId) return // Don't create notification for the source user + const { SiteMember: siteMember } = user + await this.notificationsService.create({ + siteMember, + link: `/sites/${siteName}/review/${reviewRequest.id}`, + notificationType: "updated_request", + notificationSourceUsername: email, + }) + }) + ) + } +} diff --git a/src/middleware/routeHandler.js b/src/middleware/routeHandler.js index e0e89cb94..2e582b890 100644 --- a/src/middleware/routeHandler.js +++ b/src/middleware/routeHandler.js @@ -1,5 +1,7 @@ const { backOff } = require("exponential-backoff") +const { default: GithubSessionData } = require("@classes/GithubSessionData") + const { lock, unlock } = require("@utils/mutex-utils") const { getCommitAndTreeSha, revertCommit } = require("@utils/utils.js") @@ -22,7 +24,7 @@ const attachWriteRouteHandlerWrapper = (routeHandler) => async ( ) => { const { siteName } = req.params await lock(siteName) - routeHandler(req, res).catch(async (err) => { + routeHandler(req, res, next).catch(async (err) => { await unlock(siteName) next(err) }) @@ -34,9 +36,11 @@ const attachRollbackRouteHandlerWrapper = (routeHandler) => async ( res, next ) => { - const { accessToken } = res.locals + const { userSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userSessionData + await lock(siteName) let originalCommitSha @@ -46,15 +50,18 @@ const attachRollbackRouteHandlerWrapper = (routeHandler) => async ( accessToken ) - res.locals.currentCommitSha = currentCommitSha - res.locals.treeSha = treeSha + const githubSessionData = new GithubSessionData({ + currentCommitSha, + treeSha, + }) + res.locals.githubSessionData = githubSessionData originalCommitSha = currentCommitSha } catch (err) { await unlock(siteName) next(err) } - routeHandler(req, res).catch(async (err) => { + routeHandler(req, res, next).catch(async (err) => { try { await backOff(() => revertCommit(originalCommitSha, siteName, accessToken) diff --git a/src/routes/formsgSiteCreation.ts b/src/routes/formsgSiteCreation.ts index 4a9402179..a41ec0a5d 100644 --- a/src/routes/formsgSiteCreation.ts +++ b/src/routes/formsgSiteCreation.ts @@ -2,6 +2,8 @@ import { DecryptedContent } from "@opengovsg/formsg-sdk/dist/types" import autoBind from "auto-bind" import express, { RequestHandler } from "express" +import { config } from "@config/config" + import logger from "@logger/logger" import { BadRequestError } from "@errors/BadRequestError" @@ -14,7 +16,7 @@ import UsersService from "@services/identity/UsersService" import InfraService from "@services/infra/InfraService" import { mailer } 
from "@services/utilServices/MailClient" -const { SITE_CREATE_FORM_KEY } = process.env +const SITE_CREATE_FORM_KEY = config.get("formSg.siteCreateFormKey") const REQUESTER_EMAIL_FIELD = "Government E-mail" const SITE_NAME_FIELD = "Site Name" const REPO_NAME_FIELD = "Repository Name" @@ -82,7 +84,6 @@ export class FormsgRouter { // 3. Use service to create site const { deployment } = await this.infraService.createSite( - submissionId, foundUser, siteName, repoName @@ -138,14 +139,9 @@ export class FormsgRouter { getRouter() { const router = express.Router({ mergeParams: true }) - if (!SITE_CREATE_FORM_KEY) { - throw new InitializationError( - "Required SITE_CREATE_FORM_KEY environment variable is empty." - ) - } router.post( "/create-site", - attachFormSGHandler(SITE_CREATE_FORM_KEY || ""), + attachFormSGHandler(SITE_CREATE_FORM_KEY), this.formsgCreateSite ) diff --git a/src/routes/v1/auth.js b/src/routes/v1/auth.js index abe7e7601..11f6f4ed4 100644 --- a/src/routes/v1/auth.js +++ b/src/routes/v1/auth.js @@ -1,8 +1,14 @@ +import { config } from "@config/config" + +import { isSecure } from "@root/utils/auth-utils" + const axios = require("axios") const express = require("express") const queryString = require("query-string") const uuid = require("uuid/v4") +const logger = require("@logger/logger") + // Import error const { AuthError } = require("@errors/AuthError") const { ForbiddenError } = require("@errors/ForbiddenError") @@ -13,21 +19,17 @@ const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const validateStatus = require("@utils/axios-utils") const jwtUtils = require("@utils/jwt-utils") -const { authMiddleware } = require("@root/middleware") +const { authenticationMiddleware } = require("@root/middleware") // Import services const identityServices = require("@services/identity") const router = express.Router() -const { CLIENT_ID } = process.env -const { CLIENT_SECRET } = process.env -const { REDIRECT_URI } = process.env -const AUTH_TOKEN_EXPIRY_MS = parseInt( - process.env.AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS, - 10 -) +const CLIENT_ID = config.get("github.clientId") +const CLIENT_SECRET = config.get("github.clientSecret") +const REDIRECT_URI = config.get("github.redirectUri") const CSRF_TOKEN_EXPIRY_MS = 600000 -const { FRONTEND_URL } = process.env +const FRONTEND_URL = config.get("app.frontendUrl") const CSRF_COOKIE_NAME = "isomer-csrf" const COOKIE_NAME = "isomercms" @@ -51,10 +53,7 @@ async function authRedirect(req, res) { const cookieSettings = { expires: csrfTokenExpiry, httpOnly: true, - secure: - process.env.NODE_ENV !== "DEV" && - process.env.NODE_ENV !== "LOCAL_DEV" && - process.env.NODE_ENV !== "test", + secure: isSecure, } const token = jwtUtils.signToken({ state }) @@ -111,37 +110,26 @@ async function githubAuth(req, res) { const user = await identityServices.usersService.login(githubId) if (!user) throw Error("Failed to create user") - const authTokenExpiry = new Date() - authTokenExpiry.setTime(authTokenExpiry.getTime() + AUTH_TOKEN_EXPIRY_MS) - - const cookieSettings = { - path: "/", - expires: authTokenExpiry, - httpOnly: true, - sameSite: true, - secure: - process.env.NODE_ENV !== "DEV" && - process.env.NODE_ENV !== "LOCAL_DEV" && - process.env.NODE_ENV !== "test", + const userInfo = { + accessToken: jwtUtils.encryptToken(accessToken), + githubId, + isomerUserId: user.id, } - - const token = jwtUtils.signToken({ - access_token: jwtUtils.encryptToken(accessToken), - user_id: githubId, - isomer_user_id: user.id, - }) - - 
res.cookie(COOKIE_NAME, token, cookieSettings)
+ Object.assign(req.session, { userInfo })
+ logger.info(`User ${githubId} successfully logged in`)
return res.redirect(`${FRONTEND_URL}/sites`)
}
async function logout(req, res) {
clearAllCookies(res)
+ logger.info(`User ${req.session.userInfo?.email} successfully logged out`)
+ req.session.destroy()
return res.sendStatus(200)
}
async function whoami(req, res) {
- const { accessToken } = res.locals
+ const { userSessionData } = res.locals
+ const { accessToken } = userSessionData
// Make a call to github
const endpoint = "https://api.github.com/user"
@@ -172,7 +160,7 @@ router.get("/", attachReadRouteHandlerWrapper(githubAuth))
router.delete("/logout", attachReadRouteHandlerWrapper(logout))
router.get(
"/whoami",
- authMiddleware.whoamiAuth,
+ authenticationMiddleware.verifyAccess,
attachReadRouteHandlerWrapper(whoami)
)
diff --git a/src/routes/v1/authenticated/index.js b/src/routes/v1/authenticated/index.js
index 115832aee..ad2522939 100644
--- a/src/routes/v1/authenticated/index.js
+++ b/src/routes/v1/authenticated/index.js
@@ -4,7 +4,7 @@ const sitesRouter = require("@routes/v1/authenticated/sites")
const { UsersRouter } = require("@routes/v2/authenticated/users")
const getAuthenticatedSubrouter = ({
- authMiddleware,
+ authenticationMiddleware,
usersService,
apiLogger,
}) => {
@@ -13,8 +13,8 @@
const authenticatedSubrouter = express.Router({ mergeParams: true })
- authenticatedSubrouter.use(authMiddleware.verifyJwt)
- // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username
+ authenticatedSubrouter.use(authenticationMiddleware.verifyAccess)
+ // NOTE: apiLogger needs to be after `verifyAccess` as it logs the github username
// which is only available after verifying that the jwt is valid
authenticatedSubrouter.use(apiLogger)
authenticatedSubrouter.use("/sites", sitesRouter)
diff --git a/src/routes/v1/authenticated/sites.js b/src/routes/v1/authenticated/sites.js
index 434995707..9235fa8de 100644
--- a/src/routes/v1/authenticated/sites.js
+++ b/src/routes/v1/authenticated/sites.js
@@ -1,3 +1,5 @@
+import { config } from "@config/config"
+
const Bluebird = require("bluebird")
const express = require("express")
const _ = require("lodash")
@@ -14,8 +16,8 @@ const {
const router = express.Router()
const GH_MAX_REPO_COUNT = 100
-const ISOMERPAGES_REPO_PAGE_COUNT = process.env.ISOMERPAGES_REPO_PAGE_COUNT || 3
-const ISOMER_GITHUB_ORG_NAME = process.env.GITHUB_ORG_NAME
+const ISOMERPAGES_REPO_PAGE_COUNT = config.get("sites.pageCount")
+const ISOMER_GITHUB_ORG_NAME = config.get("github.orgName")
const ISOMER_ADMIN_REPOS = [
"isomercms-backend",
"isomercms-frontend",
@@ -53,7 +55,8 @@ const timeDiff = (lastUpdated) => {
/* Returns a list of all sites (repos) that the user has access to on Isomer. */
// TO-DO: Paginate properly
async function getSites(req, res) {
- const { accessToken } = res.locals
+ const { userSessionData } = res.locals
+ const { accessToken } = userSessionData
const endpoint = `https://api.github.com/orgs/${ISOMER_GITHUB_ORG_NAME}/repos`
@@ -106,9 +109,11 @@
/* Checks if a user has access to a repo.
*/ async function checkHasAccess(req, res) { - const { accessToken } = res.locals - const { userId } = res.locals + const { userSessionData } = res.locals const { siteName } = req.params + + const { accessToken } = userSessionData + const userId = userSessionData.githubId const endpoint = `https://api.github.com/repos/${ISOMER_GITHUB_ORG_NAME}/${siteName}/collaborators/${userId}` try { @@ -130,9 +135,10 @@ async function checkHasAccess(req, res) { /* Gets the last updated time of the repo. */ async function getLastUpdated(req, res) { - const { accessToken } = res.locals + const { userSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userSessionData const endpoint = `https://api.github.com/repos/${ISOMER_GITHUB_ORG_NAME}/${siteName}` const resp = await axios.get(endpoint, { headers: { @@ -147,9 +153,10 @@ async function getLastUpdated(req, res) { /* Gets the link to the staging site for a repo. */ async function getStagingUrl(req, res) { // TODO: reconsider how we can retrieve url - we can store this in _config.yml or a dynamodb - const { accessToken } = res.locals + const { userSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userSessionData const endpoint = `https://api.github.com/repos/${ISOMER_GITHUB_ORG_NAME}/${siteName}` const resp = await axios.get(endpoint, { headers: { diff --git a/src/routes/v1/authenticatedSites/collectionPages.js b/src/routes/v1/authenticatedSites/collectionPages.js index faf86fee2..3c0f6c2b6 100644 --- a/src/routes/v1/authenticatedSites/collectionPages.js +++ b/src/routes/v1/authenticatedSites/collectionPages.js @@ -1,7 +1,6 @@ const Bluebird = require("bluebird") const express = require("express") const _ = require("lodash") -const yaml = require("yaml") // Import errors const { NotFoundError } = require("@errors/NotFoundError") @@ -20,13 +19,15 @@ const { File, CollectionPageType } = require("@classes/File") // Import utils const { readCollectionPageUtilFunc } = require("@utils/route-utils") +const { sanitizedYamlParse } = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) // List pages in collection async function listCollectionPages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, collectionName } = req.params + const { accessToken } = userWithSiteSessionData // TO-DO: Verify that collection exists @@ -40,8 +41,9 @@ async function listCollectionPages(req, res) { // Get details on all pages in a collection async function listCollectionPagesDetails(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, collectionName } = req.params + const { accessToken } = userWithSiteSessionData // Verify that collection exists const IsomerCollection = new Collection(accessToken, siteName) @@ -63,7 +65,9 @@ async function listCollectionPagesDetails(req, res) { collectionName, page.fileName ) - const frontMatter = yaml.parse(Base64.decode(content).split("---")[1]) + const frontMatter = sanitizedYamlParse( + Base64.decode(content).split("---")[1] + ) return { fileName: page.fileName, title: frontMatter.title, @@ -115,7 +119,8 @@ async function listCollectionPagesDetails(req, res) { // // Create new page in collection async function createCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, collectionName, pageName: 
encodedPageName } = req.params const { content: pageContent } = req.body @@ -134,7 +139,8 @@ async function createCollectionPage(req, res) { // Read page in collection async function readCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName, collectionName } = req.params const pageName = decodeURIComponent(encodedPageName) @@ -153,7 +159,8 @@ async function readCollectionPage(req, res) { // Update page in collection async function updateCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName, collectionName } = req.params const { content: pageContent, sha } = req.body @@ -178,7 +185,8 @@ async function updateCollectionPage(req, res) { // Delete page in collection async function deleteCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName, collectionName } = req.params const { sha } = req.body @@ -203,7 +211,8 @@ async function deleteCollectionPage(req, res) { // Rename page in collection async function renameCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, diff --git a/src/routes/v1/authenticatedSites/collections.js b/src/routes/v1/authenticatedSites/collections.js index 0f436809e..40399e87d 100644 --- a/src/routes/v1/authenticatedSites/collections.js +++ b/src/routes/v1/authenticatedSites/collections.js @@ -1,5 +1,4 @@ const express = require("express") -const yaml = require("yaml") // Import middleware const { @@ -14,13 +13,18 @@ const { File, CollectionPageType, PageType } = require("@classes/File") const { Subfolder } = require("@classes/Subfolder") const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) // List collections async function listCollections(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerCollection = new Collection(accessToken, siteName) const collections = await IsomerCollection.list() @@ -30,9 +34,10 @@ async function listCollections(req, res) { // Create new collection async function createNewCollection(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params const { collectionName } = req.body + const { accessToken } = userWithSiteSessionData const IsomerCollection = new Collection(accessToken, siteName) await IsomerCollection.create(collectionName) @@ -45,8 +50,10 @@ async function deleteCollection(req, res) { // TO-DO: Verify that collection exists // Remove collection from config file - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData, githubSessionData } = res.locals const { siteName, collectionName } = req.params + const { accessToken } = userWithSiteSessionData + const { currentCommitSha, treeSha } = githubSessionData.getGithubState() const IsomerCollection = new 
Collection(accessToken, siteName) await IsomerCollection.delete(collectionName, currentCommitSha, treeSha) @@ -59,8 +66,10 @@ async function renameCollection(req, res) { // TO-DO: Verify that collection exists // Remove collection from config file - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData, githubSessionData } = res.locals const { siteName, collectionName, newCollectionName } = req.params + const { accessToken } = userWithSiteSessionData + const { currentCommitSha, treeSha } = githubSessionData.getGithubState() const IsomerCollection = new Collection(accessToken, siteName) await IsomerCollection.rename( @@ -75,7 +84,8 @@ async function renameCollection(req, res) { // Move files in collection async function moveFiles(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, collectionPath, targetPath } = req.params const { files } = req.body const processedCollectionPathTokens = decodeURIComponent( @@ -141,13 +151,13 @@ async function moveFiles(req, res) { const [unused, encodedFrontMatter, pageContent] = Base64.decode( content ).split("---") - const frontMatter = yaml.parse(encodedFrontMatter) + const frontMatter = sanitizedYamlParse(encodedFrontMatter) if (targetSubfolderName) frontMatter.third_nav_title = deslugifyCollectionName( targetSubfolderName ) else delete frontMatter.third_nav_title - const newFrontMatter = yaml.stringify(frontMatter) + const newFrontMatter = sanitizedYamlStringify(frontMatter) const newContent = ["---\n", newFrontMatter, "---", pageContent].join("") const newEncodedContent = Base64.encode(newContent) await newIsomerFile.create(fileName, newEncodedContent) diff --git a/src/routes/v1/authenticatedSites/directory.js b/src/routes/v1/authenticatedSites/directory.js index 73d1a3058..70f81f776 100644 --- a/src/routes/v1/authenticatedSites/directory.js +++ b/src/routes/v1/authenticatedSites/directory.js @@ -9,8 +9,9 @@ const { Directory, FolderType } = require("@classes/Directory") // List pages and directories in folder async function listDirectoryContent(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, path } = req.params + const { accessToken } = userWithSiteSessionData const decodedPath = decodeURIComponent(path) diff --git a/src/routes/v1/authenticatedSites/documents.js b/src/routes/v1/authenticatedSites/documents.js index 4cc88b567..79aa8c7e3 100644 --- a/src/routes/v1/authenticatedSites/documents.js +++ b/src/routes/v1/authenticatedSites/documents.js @@ -35,8 +35,9 @@ const extractDirectoryAndFileName = (documentName) => { // List documents async function listDocuments(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerFile = new File(accessToken, siteName) const documentType = new DocumentType() @@ -48,7 +49,8 @@ async function listDocuments(req, res) { // Create new document async function createNewDocument(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName } = req.params const { documentName, documentDirectory, content } = req.body @@ -65,8 +67,9 @@ async function createNewDocument(req, res) { // Read document async function readDocument(req, res) { - const { accessToken } = 
res.locals + const { userWithSiteSessionData } = res.locals const { siteName, documentName } = req.params + const { accessToken } = userWithSiteSessionData // get document directory const { documentDirectory, documentFileName } = extractDirectoryAndFileName( @@ -85,7 +88,8 @@ async function readDocument(req, res) { // Update document async function updateDocument(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, documentName } = req.params const { content, sha } = req.body @@ -103,7 +107,8 @@ async function updateDocument(req, res) { // Delete document async function deleteDocument(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, documentName } = req.params const { sha } = req.body @@ -118,7 +123,8 @@ async function deleteDocument(req, res) { // Rename document async function renameDocument(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, documentName, newDocumentName } = req.params @@ -148,7 +154,8 @@ async function renameDocument(req, res) { // Move document async function moveDocument(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, documentName, newDocumentName } = req.params diff --git a/src/routes/v1/authenticatedSites/folders.js b/src/routes/v1/authenticatedSites/folders.js index e1f0b2000..53d26b200 100644 --- a/src/routes/v1/authenticatedSites/folders.js +++ b/src/routes/v1/authenticatedSites/folders.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const express = require("express") -const yaml = require("yaml") const { attachReadRouteHandlerWrapper, @@ -13,13 +12,18 @@ const { CollectionConfig } = require("@classes/Config") const { File, CollectionPageType } = require("@classes/File") const { getTree, sendTree, deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) // List pages and directories from all folders async function listAllFolderContent(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerCollection = new Collection(accessToken, siteName) const allFolders = IsomerCollection.list() @@ -37,8 +41,10 @@ async function listAllFolderContent(req, res) { // Delete subfolder async function deleteSubfolder(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData, githubSessionData } = res.locals const { siteName, folderName, subfolderName } = req.params + const { accessToken } = userWithSiteSessionData + const { currentCommitSha, treeSha } = githubSessionData.getGithubState() // Delete subfolder const commitMessage = `Delete subfolder ${folderName}/${subfolderName}` @@ -76,8 +82,9 @@ async function deleteSubfolder(req, res) { // Rename subfolder async function renameSubfolder(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, folderName, subfolderName, newSubfolderName } = req.params + const { accessToken } = 
userWithSiteSessionData // Rename subfolder by: // 1. Creating new files in the newSubfolderName folder @@ -109,7 +116,7 @@ async function renameSubfolder(req, res) { const decodedContent = Base64.decode(content) const results = decodedContent.split("---") - const frontMatter = yaml.parse(results[1]) // get the front matter as an object + const frontMatter = sanitizedYamlParse(results[1]) // get the front matter as an object const mdBody = results.slice(2).join("---") // Modify `third_nav_title` and save as new file in newSubfolderName @@ -120,7 +127,7 @@ async function renameSubfolder(req, res) { const newContent = [ "---\n", - yaml.stringify(newFrontMatter), + sanitizedYamlStringify(newFrontMatter), "---\n", mdBody, ].join("") diff --git a/src/routes/v1/authenticatedSites/homepage.js b/src/routes/v1/authenticatedSites/homepage.js index 266f12713..792fd1561 100644 --- a/src/routes/v1/authenticatedSites/homepage.js +++ b/src/routes/v1/authenticatedSites/homepage.js @@ -16,7 +16,8 @@ const HOMEPAGE_INDEX_PATH = "index.md" // Empty string // Read homepage index file async function readHomepage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName } = req.params @@ -36,7 +37,8 @@ async function readHomepage(req, res) { // Update homepage index file async function updateHomepage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName } = req.params const { content, sha } = req.body diff --git a/src/routes/v1/authenticatedSites/images.js b/src/routes/v1/authenticatedSites/images.js index 4bda0d184..d0b7b2b54 100644 --- a/src/routes/v1/authenticatedSites/images.js +++ b/src/routes/v1/authenticatedSites/images.js @@ -36,8 +36,9 @@ const extractDirectoryAndFileName = (imageName) => { // List images async function listImages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerFile = new File(accessToken, siteName) const imageType = new ImageType() @@ -49,7 +50,8 @@ async function listImages(req, res) { // Create new image async function createNewImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName } = req.params const { imageName, imageDirectory, content } = req.body @@ -66,7 +68,8 @@ async function createNewImage(req, res) { // Read image async function readImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, imageName } = req.params @@ -88,7 +91,8 @@ async function readImage(req, res) { // Update image async function updateImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, imageName } = req.params const { content, sha } = req.body @@ -106,7 +110,8 @@ async function updateImage(req, res) { // Delete image async function deleteImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, imageName } = req.params const { sha } = req.body @@ -121,7 +126,8 @@ async function 
deleteImage(req, res) { // Rename image async function renameImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, imageName, newImageName } = req.params @@ -150,7 +156,8 @@ async function renameImage(req, res) { // Move image async function moveImage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, imageName, newImageName } = req.params diff --git a/src/routes/v1/authenticatedSites/index.js b/src/routes/v1/authenticatedSites/index.js index 31ce5d616..e9d78524d 100644 --- a/src/routes/v1/authenticatedSites/index.js +++ b/src/routes/v1/authenticatedSites/index.js @@ -1,3 +1,5 @@ +import { attachSiteHandler } from "@root/middleware" + const express = require("express") const collectionPagesRouter = require("@routes/v1/authenticatedSites/collectionPages") @@ -16,14 +18,19 @@ const resourceRoomRouter = require("@routes/v1/authenticatedSites/resourceRoom") const resourcesRouter = require("@routes/v1/authenticatedSites/resources") const settingsRouter = require("@routes/v1/authenticatedSites/settings") -const getAuthenticatedSitesSubrouter = ({ authMiddleware, apiLogger }) => { +const getAuthenticatedSitesSubrouter = ({ + authenticationMiddleware, + authorizationMiddleware, + apiLogger +}) => { const authenticatedSitesSubrouter = express.Router({ mergeParams: true }) - authenticatedSitesSubrouter.use(authMiddleware.verifyJwt) - authenticatedSitesSubrouter.use(authMiddleware.useSiteAccessTokenIfAvailable) + authenticatedSitesSubrouter.use(authenticationMiddleware.verifyAccess) + authenticatedSitesSubrouter.use(attachSiteHandler) // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username // which is only available after verifying that the jwt is valid authenticatedSitesSubrouter.use(apiLogger) + authenticatedSitesSubrouter.use(authorizationMiddleware.verifySiteMember) authenticatedSitesSubrouter.use("/pages", pagesRouter) authenticatedSitesSubrouter.use("/collections", collectionsRouter) diff --git a/src/routes/v1/authenticatedSites/mediaSubfolder.js b/src/routes/v1/authenticatedSites/mediaSubfolder.js index be077a7d7..389bf6f6c 100644 --- a/src/routes/v1/authenticatedSites/mediaSubfolder.js +++ b/src/routes/v1/authenticatedSites/mediaSubfolder.js @@ -13,8 +13,9 @@ const { MediaSubfolder } = require("@classes/MediaSubfolder") // Create new collection async function createSubfolder(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, mediaType, folderPath } = req.params + const { accessToken } = userWithSiteSessionData const processedFolderPath = decodeURIComponent(folderPath) @@ -30,8 +31,10 @@ async function createSubfolder(req, res) { // Delete collection async function deleteSubfolder(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, mediaType, folderPath } = req.params + const { accessToken } = userWithSiteSessionData + const { currentCommitSha, treeSha } = userWithSiteSessionData.getGithubState() const processedFolderPath = decodeURIComponent(folderPath) @@ -51,8 +54,10 @@ async function deleteSubfolder(req, res) { // Rename collection async function renameSubfolder(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData } = res.locals 
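The wiring added to src/routes/v1/authenticatedSites/index.js above is the crux of the session rework: verifyAccess builds res.locals.userSessionData from the server-side session, attachSiteHandler widens it into res.locals.userWithSiteSessionData using the :siteName route param, apiLogger then has an identity to log, and verifySiteMember authorises the user against that site. The sketch below is illustrative only and uses simplified stand-in middleware (the hard-coded user, the /pages handler and the port are invented for the example) to show why that ordering matters:

    // Self-contained sketch with stand-ins that mimic the ordering used in
    // src/routes/v1/authenticatedSites/index.js; these are NOT the real middleware classes.
    import express, { NextFunction, Request, Response } from "express"

    type Locals = {
      userSessionData?: { isomerUserId: string; email?: string }
      userWithSiteSessionData?: { isomerUserId: string; email?: string; siteName: string }
    }

    const verifyAccess = (req: Request, res: Response<unknown, Locals>, next: NextFunction) => {
      // The real version reads the express-session cookie; hard-coded here for illustration.
      res.locals.userSessionData = { isomerUserId: "1", email: "user@agency.gov.sg" }
      next()
    }

    const attachSiteHandler = (req: Request, res: Response<unknown, Locals>, next: NextFunction) => {
      // Derives site-scoped session data from the :siteName route param.
      res.locals.userWithSiteSessionData = {
        ...res.locals.userSessionData!,
        siteName: req.params.siteName,
      }
      next()
    }

    const apiLogger = (req: Request, res: Response<unknown, Locals>, next: NextFunction) => {
      // Must run after verifyAccess so that the user's email is available to log.
      console.log(`User ${res.locals.userSessionData?.email} called ${req.method} on ${req.path}`)
      next()
    }

    const verifySiteMember = (req: Request, res: Response<unknown, Locals>, next: NextFunction) => {
      // The real version delegates to AuthorizationMiddlewareService; always allows here.
      next()
    }

    const app = express()
    const sitesRouter = express.Router({ mergeParams: true })

    // Same ordering as the patch: authenticate, attach site context, log, authorize.
    sitesRouter.use(verifyAccess)
    sitesRouter.use(attachSiteHandler)
    sitesRouter.use(apiLogger)
    sitesRouter.use(verifySiteMember)
    sitesRouter.get("/pages", (req, res) =>
      res.json({ siteName: res.locals.userWithSiteSessionData?.siteName })
    )

    app.use("/sites/:siteName", sitesRouter)
    app.listen(3000)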
const { siteName, mediaType, oldFolderPath, newFolderPath } = req.params + const { accessToken } = userWithSiteSessionData + const { currentCommitSha, treeSha } = userWithSiteSessionData.getGithubState() const processedOldFolderPath = decodeURIComponent(oldFolderPath) const processedNewFolderPath = decodeURIComponent(newFolderPath) diff --git a/src/routes/v1/authenticatedSites/navigation.js b/src/routes/v1/authenticatedSites/navigation.js index 9cae08a8f..6830e667e 100644 --- a/src/routes/v1/authenticatedSites/navigation.js +++ b/src/routes/v1/authenticatedSites/navigation.js @@ -1,5 +1,9 @@ const express = require("express") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -15,7 +19,8 @@ const { File, DataType } = require("@classes/File") const NAVIGATION_PATH = "navigation.yml" async function getNavigation(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName } = req.params @@ -26,13 +31,14 @@ async function getNavigation(req, res) { return res.status(200).json({ sha, - content: yaml.parse(Base64.decode(content)), + content: sanitizedYamlParse(Base64.decode(content)), }) } async function updateNavigation(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const { content, sha } = req.body @@ -41,7 +47,7 @@ async function updateNavigation(req, res) { IsomerFile.setFileType(dataType) await IsomerFile.update( NAVIGATION_PATH, - Base64.encode(yaml.stringify(content)), + Base64.encode(sanitizedYamlStringify(content)), sha ) diff --git a/src/routes/v1/authenticatedSites/netlifyToml.js b/src/routes/v1/authenticatedSites/netlifyToml.js index 17d379e90..4a5add282 100644 --- a/src/routes/v1/authenticatedSites/netlifyToml.js +++ b/src/routes/v1/authenticatedSites/netlifyToml.js @@ -11,8 +11,9 @@ const router = express.Router({ mergeParams: true }) // List resources async function getNetlifyToml(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const netlifyTomlFile = new NetlifyToml(accessToken, siteName) diff --git a/src/routes/v1/authenticatedSites/pages.js b/src/routes/v1/authenticatedSites/pages.js index cde5de760..8543300fe 100644 --- a/src/routes/v1/authenticatedSites/pages.js +++ b/src/routes/v1/authenticatedSites/pages.js @@ -1,5 +1,4 @@ const express = require("express") -const yaml = require("yaml") // Import middleware const { @@ -15,12 +14,17 @@ const { File, PageType, CollectionPageType } = require("@classes/File") const { Subfolder } = require("@classes/Subfolder") const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) async function listPages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerFile = new File(accessToken, siteName) const pageType = new PageType() @@ -31,7 +35,8 @@ async function listPages(req, res) { } async function createPage(req, res) { - const { accessToken } = res.locals + const { 
userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName } = req.params const { content: pageContent } = req.body @@ -47,7 +52,8 @@ async function createPage(req, res) { // Read page async function readPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName } = req.params const pageName = decodeURIComponent(encodedPageName) @@ -67,7 +73,8 @@ async function readPage(req, res) { // Update page async function updatePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName } = req.params const { content: pageContent, sha } = req.body @@ -90,7 +97,8 @@ async function updatePage(req, res) { // Delete page async function deletePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName: encodedPageName } = req.params const { sha } = req.body @@ -106,7 +114,8 @@ async function deletePage(req, res) { // Rename page async function renamePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, @@ -136,7 +145,8 @@ async function renamePage(req, res) { // Move unlinked pages async function moveUnlinkedPages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, newPagePath } = req.params const { files } = req.body const processedTargetPathTokens = decodeURIComponent(newPagePath).split("/") @@ -190,9 +200,9 @@ async function moveUnlinkedPages(req, res) { const [unused, encodedFrontMatter, pageContent] = Base64.decode( content ).split("---") - const frontMatter = yaml.parse(encodedFrontMatter) + const frontMatter = sanitizedYamlParse(encodedFrontMatter) frontMatter.third_nav_title = deslugifyCollectionName(targetSubfolderName) - const newFrontMatter = yaml.stringify(frontMatter) + const newFrontMatter = sanitizedYamlStringify(frontMatter) const newContent = ["---\n", newFrontMatter, "---", pageContent].join("") const newEncodedContent = Base64.encode(newContent) await newIsomerFile.create(fileName, newEncodedContent) diff --git a/src/routes/v1/authenticatedSites/resourcePages.js b/src/routes/v1/authenticatedSites/resourcePages.js index 4b3f56f3f..445a08207 100644 --- a/src/routes/v1/authenticatedSites/resourcePages.js +++ b/src/routes/v1/authenticatedSites/resourcePages.js @@ -18,7 +18,8 @@ const { ResourceRoom } = require("@classes/ResourceRoom") // List pages in resource async function listResourcePages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, resourceName } = req.params const ResourceRoomInstance = new ResourceRoom(accessToken, siteName) @@ -45,7 +46,8 @@ async function listResourcePages(req, res) { // Create new page in resource async function createNewResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, resourceName, pageName } = req.params const { content: 
pageContent } = req.body @@ -74,7 +76,8 @@ async function createNewResourcePage(req, res) { // Read page in resource async function readResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName, resourceName } = req.params @@ -96,7 +99,8 @@ async function readResourcePage(req, res) { // Update page in resource async function updateResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName, resourceName } = req.params const { content: pageContent, sha } = req.body @@ -122,7 +126,8 @@ async function updateResourcePage(req, res) { // Delete page in resource async function deleteResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName, resourceName } = req.params const { sha } = req.body @@ -139,7 +144,8 @@ async function deleteResourcePage(req, res) { // Rename page in resource async function renameResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals + const { accessToken } = userWithSiteSessionData const { siteName, pageName, resourceName, newPageName } = req.params const { sha, content: pageContent } = req.body diff --git a/src/routes/v1/authenticatedSites/resourceRoom.js b/src/routes/v1/authenticatedSites/resourceRoom.js index f0fae0ba6..db0b8946b 100644 --- a/src/routes/v1/authenticatedSites/resourceRoom.js +++ b/src/routes/v1/authenticatedSites/resourceRoom.js @@ -12,8 +12,9 @@ const { ResourceRoom } = require("@classes/ResourceRoom") // Get resource room name async function getResourceRoomName(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) const resourceRoom = await IsomerResourceRoom.get() @@ -23,9 +24,10 @@ async function getResourceRoomName(req, res) { // Create resource room async function createResourceRoom(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params const { resourceRoom } = req.body + const { accessToken } = userWithSiteSessionData // TO-DO: // Validate resourceRoom @@ -38,8 +40,9 @@ async function createResourceRoom(req, res) { // Rename resource room name async function renameResourceRoom(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, resourceRoom } = req.params + const { accessToken } = userWithSiteSessionData // TO-DO: // Validate resourceRoom @@ -52,8 +55,9 @@ async function renameResourceRoom(req, res) { // Delete resource room async function deleteResourceRoom(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) await IsomerResourceRoom.delete() diff --git a/src/routes/v1/authenticatedSites/resources.js b/src/routes/v1/authenticatedSites/resources.js index 66e324ee4..093f5d36f 100644 --- a/src/routes/v1/authenticatedSites/resources.js +++ b/src/routes/v1/authenticatedSites/resources.js @@ -19,8 
+19,9 @@ const { ResourceRoom } = require("@classes/ResourceRoom") // List resources async function listResources(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) const resourceRoomName = await IsomerResourceRoom.get() @@ -33,9 +34,10 @@ async function listResources(req, res) { // Create new resource async function createNewResource(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params const { resourceName } = req.body + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) const resourceRoomName = await IsomerResourceRoom.get() @@ -48,8 +50,9 @@ async function createNewResource(req, res) { // Delete resource async function deleteResource(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, resourceName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) const resourceRoomName = await IsomerResourceRoom.get() @@ -62,8 +65,9 @@ async function deleteResource(req, res) { // Rename resource async function renameResource(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, resourceName, newResourceName } = req.params + const { accessToken } = userWithSiteSessionData const IsomerResourceRoom = new ResourceRoom(accessToken, siteName) const resourceRoomName = await IsomerResourceRoom.get() @@ -78,9 +82,10 @@ async function renameResource(req, res) { /* eslint-disable no-await-in-loop, no-restricted-syntax */ // Move resource async function moveResources(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName, resourceName, newResourceName } = req.params const { files } = req.body + const { accessToken } = userWithSiteSessionData const ResourceRoomInstance = new ResourceRoom(accessToken, siteName) const resourceRoomName = await ResourceRoomInstance.get() diff --git a/src/routes/v1/authenticatedSites/settings.js b/src/routes/v1/authenticatedSites/settings.js index ae28cda35..72d6e85b6 100644 --- a/src/routes/v1/authenticatedSites/settings.js +++ b/src/routes/v1/authenticatedSites/settings.js @@ -12,8 +12,9 @@ const { const { Settings } = require("@classes/Settings") async function getSettings(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const settingsFile = new Settings(accessToken, siteName) const settings = await settingsFile.get() @@ -21,8 +22,9 @@ async function getSettings(req, res) { } async function updateSettings(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const { siteName } = req.params + const { accessToken } = userWithSiteSessionData const settings = new Settings(accessToken, siteName) await settings.post(req.body) diff --git a/src/routes/v2/__tests__/Auth.spec.js b/src/routes/v2/__tests__/Auth.spec.js index 2b2ee111f..e5b64fcac 100644 --- a/src/routes/v2/__tests__/Auth.spec.js +++ b/src/routes/v2/__tests__/Auth.spec.js @@ -1,29 +1,54 @@ const express = require("express") +const session = require("express-session") 
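// express-session is mounted on the test app further below so that route handlers
// which write userInfo onto req.session (githubAuth, verify) behave as they would
// behind the real session middleware.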
const request = require("supertest") +const { config } = require("@config/config") + const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserSessionData, mockEmail } = require("@fixtures/sessionData") +const { rateLimiter } = require("@root/services/utilServices/RateLimiter") const { CSRF_COOKIE_NAME, COOKIE_NAME, AuthRouter } = require("../auth") -const { FRONTEND_URL } = process.env +const FRONTEND_URL = config.get("app.frontendUrl") const csrfState = "csrfState" const cookieToken = "cookieToken" -const accessToken = undefined +const MOCK_USER_ID = "userId" describe("Unlinked Pages Router", () => { + jest.mock("@logger/logger", { + info: jest.fn(), + }) + const mockAuthService = { getAuthRedirectDetails: jest.fn(), - getGithubAuthToken: jest.fn(), + getUserInfoFromGithubAuth: jest.fn(), getUserInfo: jest.fn(), + sendOtp: jest.fn(), + verifyOtp: jest.fn(), + } + const mockAuthenticationMiddleware = { + verifyJwt: jest.fn().mockImplementation((req, res, next) => next()), } const router = new AuthRouter({ authService: mockAuthService, + authenticationMiddleware: mockAuthenticationMiddleware, + rateLimiter, }) const subrouter = express() + const options = { + resave: true, + saveUninitialized: true, + secret: "blah", + cookie: { + maxAge: 1209600000, + }, + } + subrouter.use(session(options)) // We can use read route handler here because we don't need to lock the repo subrouter.get( @@ -31,6 +56,8 @@ describe("Unlinked Pages Router", () => { attachReadRouteHandlerWrapper(router.authRedirect) ) subrouter.get("/", attachReadRouteHandlerWrapper(router.githubAuth)) + subrouter.post("/login", attachReadRouteHandlerWrapper(router.login)) + subrouter.post("/verify", attachReadRouteHandlerWrapper(router.verify)) subrouter.delete("/logout", attachReadRouteHandlerWrapper(router.logout)) subrouter.get("/whoami", attachReadRouteHandlerWrapper(router.whoami)) const app = generateRouter(subrouter) @@ -63,7 +90,7 @@ describe("Unlinked Pages Router", () => { const state = "state" const token = "token" it("retrieves the token and redirects back to the correct page after github auth", async () => { - mockAuthService.getGithubAuthToken.mockResolvedValueOnce({ + mockAuthService.getUserInfoFromGithubAuth.mockResolvedValueOnce({ token, }) @@ -71,18 +98,38 @@ describe("Unlinked Pages Router", () => { .get(`/?code=${code}&state=${state}`) .set("Cookie", `${CSRF_COOKIE_NAME}=${csrfState};`) - expect(mockAuthService.getGithubAuthToken).toHaveBeenCalledWith({ + expect(mockAuthService.getUserInfoFromGithubAuth).toHaveBeenCalledWith({ csrfState, code, state, }) expect(resp.status).toEqual(302) expect(resp.headers.location).toContain(`${FRONTEND_URL}/sites`) - expect(resp.headers["set-cookie"]).toEqual( - expect.arrayContaining([expect.stringContaining(COOKIE_NAME)]) + expect(resp.headers["set-cookie"]).toBeTruthy() + }) + }) + describe("login", () => { + it("calls the service to send otp", async () => { + await request(app).post(`/login`).send({ email: mockEmail }).expect(200) + expect(mockAuthService.sendOtp).toHaveBeenCalledWith( + mockEmail.toLowerCase() ) }) }) + describe("verify", () => { + const mockOtp = "123456" + mockAuthService.verifyOtp.mockImplementationOnce(() => ({ + email: mockEmail, + })) + it("adds the cookie on login", async () => { + mockAuthService.getAuthRedirectDetails.mockResolvedValueOnce(cookieToken) + await request(app) + .post(`/verify`) + .send({ email: mockEmail, otp: mockOtp }) + .set("Cookie", 
`${COOKIE_NAME}=${cookieToken}`) + .expect(200) + }) + }) describe("logout", () => { it("removes cookies on logout", async () => { const resp = await request(app) @@ -103,17 +150,18 @@ describe("Unlinked Pages Router", () => { }) describe("whoami", () => { - const userId = "userId" it("returns user info if found", async () => { const expectedResponse = { - userId, + userId: MOCK_USER_ID, } mockAuthService.getUserInfo.mockResolvedValueOnce(expectedResponse) const resp = await request(app).get(`/whoami`).expect(200) expect(resp.body).toStrictEqual(expectedResponse) - expect(mockAuthService.getUserInfo).toHaveBeenCalledWith({ accessToken }) + expect(mockAuthService.getUserInfo).toHaveBeenCalledWith( + mockUserSessionData + ) }) it("sends a 401 if user not found", async () => { @@ -121,7 +169,9 @@ describe("Unlinked Pages Router", () => { await request(app).get(`/whoami`).expect(401) - expect(mockAuthService.getUserInfo).toHaveBeenCalledWith({ accessToken }) + expect(mockAuthService.getUserInfo).toHaveBeenCalledWith( + mockUserSessionData + ) }) }) }) diff --git a/src/routes/v2/auth.js b/src/routes/v2/auth.js index 1ddc9ad9f..e50f4cec1 100644 --- a/src/routes/v2/auth.js +++ b/src/routes/v2/auth.js @@ -1,25 +1,32 @@ const autoBind = require("auto-bind") const express = require("express") +const { config } = require("@config/config") + +const logger = require("@logger/logger") + // Import middleware const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") -const { FRONTEND_URL } = process.env +const FRONTEND_URL = config.get("app.frontendUrl") const { isSecure } = require("@utils/auth-utils") -const AUTH_TOKEN_EXPIRY_MS = parseInt( - process.env.AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS, - 10 -) +const AUTH_TOKEN_EXPIRY_MS = config.get("auth.tokenExpiry") const CSRF_TOKEN_EXPIRY_MS = 600000 const CSRF_COOKIE_NAME = "isomer-csrf" const COOKIE_NAME = "isomercms" class AuthRouter { - constructor({ authService, authMiddleware, apiLogger }) { + constructor({ + authService, + authenticationMiddleware, + apiLogger, + rateLimiter, + }) { this.authService = authService - this.authMiddleware = authMiddleware + this.authenticationMiddleware = authenticationMiddleware this.apiLogger = apiLogger + this.rateLimiter = rateLimiter // We need to bind all methods because we don't invoke them from the class directly autoBind(this) } @@ -44,7 +51,7 @@ class AuthRouter { const cookieSettings = { expires: csrfTokenExpiry, httpOnly: true, - secure: isSecure(), + secure: isSecure, } res.cookie(CSRF_COOKIE_NAME, cookieToken, cookieSettings) return res.redirect(redirectUrl) @@ -54,36 +61,52 @@ class AuthRouter { const csrfState = req.cookies[CSRF_COOKIE_NAME] const { code, state } = req.query - const token = await this.authService.getGithubAuthToken({ + const userInfo = await this.authService.getUserInfoFromGithubAuth({ csrfState, code, state, }) - const authTokenExpiry = new Date() - // getTime allows this to work across timezones - authTokenExpiry.setTime(authTokenExpiry.getTime() + AUTH_TOKEN_EXPIRY_MS) - const cookieSettings = { - path: "/", - expires: authTokenExpiry, - httpOnly: true, - sameSite: true, - secure: isSecure(), - } - res.cookie(COOKIE_NAME, token, cookieSettings) + logger.info(`User ${userInfo.email} successfully logged in`) + Object.assign(req.session, { userInfo }) return res.redirect(`${FRONTEND_URL}/sites`) } + async login(req, res) { + const { email: rawEmail } = req.body + const email = rawEmail.toLowerCase() + try { + await this.authService.sendOtp(email) + } catch (err) { 
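+      // Returning 200 on both the success and failure paths means a caller cannot
+      // tell from the response whether an email address is able to receive an OTP.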
+ // Log, but don't return so responses are indistinguishable + logger.error( + `Error occurred when attempting to login user ${email}: ${err}` + ) + } + return res.sendStatus(200) + } + + async verify(req, res) { + const { email: rawEmail, otp } = req.body + const email = rawEmail.toLowerCase() + const userInfo = await this.authService.verifyOtp({ email, otp }) + Object.assign(req.session, { userInfo }) + logger.info(`User ${userInfo.email} successfully logged in`) + return res.sendStatus(200) + } + async logout(req, res) { this.clearIsomerCookies(res) + req.session.destroy() return res.sendStatus(200) } async whoami(req, res) { - const { accessToken } = res.locals + const { userSessionData } = res.locals - const userInfo = await this.authService.getUserInfo({ accessToken }) + const userInfo = await this.authService.getUserInfo(userSessionData) if (!userInfo) { this.clearIsomerCookies(res) + req.session.destroy() return res.sendStatus(401) } return res.status(200).json(userInfo) @@ -93,15 +116,18 @@ class AuthRouter { const router = express.Router() router.use(this.apiLogger) + router.use(this.rateLimiter) router.get( "/github-redirect", attachReadRouteHandlerWrapper(this.authRedirect) ) router.get("/", attachReadRouteHandlerWrapper(this.githubAuth)) + router.post("/login", attachReadRouteHandlerWrapper(this.login)) + router.post("/verify", attachReadRouteHandlerWrapper(this.verify)) router.delete("/logout", attachReadRouteHandlerWrapper(this.logout)) router.get( "/whoami", - this.authMiddleware.whoamiAuth, + this.authenticationMiddleware.verifyAccess, attachReadRouteHandlerWrapper(this.whoami) ) diff --git a/src/routes/v2/authenticated/__tests__/NetlifyToml.spec.js b/src/routes/v2/authenticated/__tests__/NetlifyToml.spec.js index da284de6a..236b4039d 100644 --- a/src/routes/v2/authenticated/__tests__/NetlifyToml.spec.js +++ b/src/routes/v2/authenticated/__tests__/NetlifyToml.spec.js @@ -4,6 +4,7 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserSessionData } = require("@fixtures/sessionData") const { NetlifyTomlRouter } = require("../netlifyToml") @@ -26,10 +27,6 @@ describe("NetlifyToml Router", () => { ) const app = generateRouter(subrouter) - const accessToken = undefined // Can't set request fields - will always be undefined - - const reqDetails = { accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -42,7 +39,9 @@ describe("NetlifyToml Router", () => { const resp = await request(app).get(`/netlifyToml`).expect(200) expect(resp.body).toStrictEqual({ netlifyTomlHeaderValues }) - expect(mockNetlifyTomlService.read).toHaveBeenCalledWith(reqDetails) + expect(mockNetlifyTomlService.read).toHaveBeenCalledWith( + mockUserSessionData + ) }) }) }) diff --git a/src/routes/v2/authenticated/__tests__/Notifications.spec.ts b/src/routes/v2/authenticated/__tests__/Notifications.spec.ts new file mode 100644 index 000000000..0eca69a4d --- /dev/null +++ b/src/routes/v2/authenticated/__tests__/Notifications.spec.ts @@ -0,0 +1,113 @@ +import express from "express" +import request from "supertest" + +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import { generateRouter } from "@fixtures/app" +import { mockSiteName, mockIsomerUserId } from "@fixtures/sessionData" +import { AuthorizationMiddleware } from "@root/middleware/authorization" +import { NotificationsRouter as _NotificationsRouter } from 
"@root/routes/v2/authenticated/notifications" +import NotificationsService from "@services/identity/NotificationsService" + +describe("Notifications Router", () => { + const mockNotificationsService = { + listRecent: jest.fn(), + listAll: jest.fn(), + markNotificationsAsRead: jest.fn(), + } + const mockAuthorizationMiddleware = { + verifySiteMember: jest.fn(), + } + + const NotificationsRouter = new _NotificationsRouter({ + notificationsService: (mockNotificationsService as unknown) as NotificationsService, + authorizationMiddleware: (mockAuthorizationMiddleware as unknown) as AuthorizationMiddleware, + }) + + const subrouter = express() + + // We can use read route handler here because we don't need to lock the repo + subrouter.get( + "/:siteName/notifications/", + attachReadRouteHandlerWrapper(NotificationsRouter.getRecentNotifications) + ) + subrouter.get( + "/:siteName/notifications/allNotifications", + attachReadRouteHandlerWrapper(NotificationsRouter.getAllNotifications) + ) + subrouter.post( + "/:siteName/notifications/", + attachReadRouteHandlerWrapper(NotificationsRouter.markNotificationsAsRead) + ) + + const app = generateRouter(subrouter) + + beforeEach(() => { + jest.clearAllMocks() + }) + + describe("getRecentNotifications", () => { + it("should call the underlying service when there is a GET request", async () => { + // Arrange + const mockNotificationsValue: never[] = [] + mockNotificationsService.listRecent.mockResolvedValueOnce( + mockNotificationsValue + ) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/notifications/`) + .expect(200) + + // Assert + expect(resp.body).toStrictEqual(mockNotificationsValue) + expect(mockNotificationsService.listRecent).toHaveBeenCalledWith({ + siteName: mockSiteName, + userId: mockIsomerUserId, + }) + }) + }) + + describe("getAllNotifications", () => { + it("should call the underlying service when there is a GET request", async () => { + // Arrange + const mockNotificationsValue: never[] = [] + mockNotificationsService.listAll.mockResolvedValueOnce( + mockNotificationsValue + ) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/notifications/allNotifications`) + .expect(200) + + // Assert + expect(resp.body).toStrictEqual(mockNotificationsValue) + expect(mockNotificationsService.listAll).toHaveBeenCalledWith({ + siteName: mockSiteName, + userId: mockIsomerUserId, + }) + }) + }) + + describe("markNotificationsAsRead", () => { + it("should call the underlying service when there is a POST request", async () => { + // Arrange + const mockRequestBody = {} + + // Act + await request(app) + .post(`/${mockSiteName}/notifications/`) + .send(mockRequestBody) + .expect(200) + + // Assert + expect( + mockNotificationsService.markNotificationsAsRead + ).toHaveBeenCalledWith({ + siteName: mockSiteName, + userId: mockIsomerUserId, + }) + }) + }) +}) diff --git a/src/routes/v2/authenticated/__tests__/Sites.spec.js b/src/routes/v2/authenticated/__tests__/Sites.spec.js deleted file mode 100644 index ad0e19050..000000000 --- a/src/routes/v2/authenticated/__tests__/Sites.spec.js +++ /dev/null @@ -1,113 +0,0 @@ -const express = require("express") -const request = require("supertest") - -const { NotFoundError } = require("@errors/NotFoundError") - -const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") - -const { generateRouter } = require("@fixtures/app") - -const { SitesRouter } = require("../sites") - -// Can't set request fields - will always be undefined -const userId = undefined -const 
accessToken = undefined - -const siteName = "siteName" - -const reqDetails = { siteName, accessToken } - -describe("Sites Router", () => { - const mockSitesService = { - getSites: jest.fn(), - checkHasAccess: jest.fn(), - getLastUpdated: jest.fn(), - getStagingUrl: jest.fn(), - } - - const router = new SitesRouter({ - sitesService: mockSitesService, - }) - - const subrouter = express() - - // We can use read route handler here because we don't need to lock the repo - subrouter.get("/", attachReadRouteHandlerWrapper(router.getSites)) - subrouter.get( - "/:siteName", - attachReadRouteHandlerWrapper(router.checkHasAccess) - ) - subrouter.get( - "/:siteName/lastUpdated", - attachReadRouteHandlerWrapper(router.getLastUpdated) - ) - subrouter.get( - "/:siteName/stagingUrl", - attachReadRouteHandlerWrapper(router.getStagingUrl) - ) - const app = generateRouter(subrouter) - - beforeEach(() => { - jest.clearAllMocks() - }) - - describe("getSites", () => { - it("returns the list of sites accessible to the user", async () => { - const sitesResp = ["site1", "site2"] - mockSitesService.getSites.mockResolvedValueOnce(sitesResp) - - const resp = await request(app).get(`/`).expect(200) - - expect(resp.body).toStrictEqual({ siteNames: sitesResp }) - expect(mockSitesService.getSites).toHaveBeenCalledWith({ accessToken }) - }) - }) - - describe("checkHasAccess", () => { - it("rejects if user has no access to a site", async () => { - mockSitesService.checkHasAccess.mockRejectedValueOnce( - new NotFoundError("") - ) - - await request(app).get(`/${siteName}`).expect(404) - - expect(mockSitesService.checkHasAccess).toHaveBeenCalledWith(reqDetails, { - userId, - }) - }) - - it("allows if user has access to a site", async () => { - await request(app).get(`/${siteName}`).expect(200) - - expect(mockSitesService.checkHasAccess).toHaveBeenCalledWith(reqDetails, { - userId, - }) - }) - }) - - describe("getLastUpdated", () => { - it("returns the last updated time", async () => { - const lastUpdated = "last-updated" - mockSitesService.getLastUpdated.mockResolvedValueOnce(lastUpdated) - - const resp = await request(app) - .get(`/${siteName}/lastUpdated`) - .expect(200) - - expect(resp.body).toStrictEqual({ lastUpdated }) - expect(mockSitesService.getLastUpdated).toHaveBeenCalledWith(reqDetails) - }) - }) - - describe("getStagingUrl", () => { - it("returns the last updated time", async () => { - const stagingUrl = "staging-url" - mockSitesService.getStagingUrl.mockResolvedValueOnce(stagingUrl) - - const resp = await request(app).get(`/${siteName}/stagingUrl`).expect(200) - - expect(resp.body).toStrictEqual({ stagingUrl }) - expect(mockSitesService.getStagingUrl).toHaveBeenCalledWith(reqDetails) - }) - }) -}) diff --git a/src/routes/v2/authenticated/__tests__/Sites.spec.ts b/src/routes/v2/authenticated/__tests__/Sites.spec.ts new file mode 100644 index 000000000..485092714 --- /dev/null +++ b/src/routes/v2/authenticated/__tests__/Sites.spec.ts @@ -0,0 +1,143 @@ +import express from "express" +import request from "supertest" + +import type { AuthorizationMiddleware } from "@middleware/authorization" +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import { generateRouter } from "@fixtures/app" +import { + mockSiteName, + mockUserSessionData, + mockUserWithSiteSessionData, +} from "@fixtures/sessionData" +import type SitesService from "@services/identity/SitesService" + +import { SitesRouter } from "../sites" + +describe("Sites Router", () => { + const mockSitesService = { + getSites: jest.fn(), + 
getLastUpdated: jest.fn(), + getStagingUrl: jest.fn(), + getSiteUrl: jest.fn(), + getSiteInfo: jest.fn(), + } + + const mockAuthorizationMiddleware = { + verifySiteMember: jest.fn(), + } + + const router = new SitesRouter({ + sitesService: (mockSitesService as unknown) as SitesService, + authorizationMiddleware: (mockAuthorizationMiddleware as unknown) as AuthorizationMiddleware, + }) + + const subrouter = express() + + // We can use read route handler here because we don't need to lock the repo + subrouter.get("/", attachReadRouteHandlerWrapper(router.getSites)) + subrouter.get( + "/:siteName/lastUpdated", + attachReadRouteHandlerWrapper(router.getLastUpdated) + ) + subrouter.get( + "/:siteName/stagingUrl", + attachReadRouteHandlerWrapper(router.getStagingUrl) + ) + subrouter.get( + "/:siteName/siteUrl", + attachReadRouteHandlerWrapper(router.getSiteUrl) + ) + subrouter.get( + "/:siteName/info", + attachReadRouteHandlerWrapper(router.getSiteInfo) + ) + const app = generateRouter(subrouter) + + beforeEach(() => { + jest.clearAllMocks() + }) + + describe("getSites", () => { + it("returns the list of sites accessible to the user", async () => { + const sitesResp = ["site1", "site2"] + mockSitesService.getSites.mockResolvedValueOnce(sitesResp) + + const resp = await request(app).get(`/`).expect(200) + + expect(resp.body).toStrictEqual({ siteNames: sitesResp }) + expect(mockSitesService.getSites).toHaveBeenCalledWith( + mockUserSessionData + ) + }) + }) + + describe("getLastUpdated", () => { + it("returns the last updated time", async () => { + const lastUpdated = "last-updated" + mockSitesService.getLastUpdated.mockResolvedValueOnce(lastUpdated) + + const resp = await request(app) + .get(`/${mockSiteName}/lastUpdated`) + .expect(200) + + expect(resp.body).toStrictEqual({ lastUpdated }) + expect(mockSitesService.getLastUpdated).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + }) + }) + + describe("getStagingUrl", () => { + it("returns the site's staging URL", async () => { + const stagingUrl = "staging-url" + mockSitesService.getStagingUrl.mockResolvedValueOnce(stagingUrl) + + const resp = await request(app) + .get(`/${mockSiteName}/stagingUrl`) + .expect(200) + + expect(resp.body).toStrictEqual({ stagingUrl }) + expect(mockSitesService.getStagingUrl).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + }) + }) + + describe("getSiteUrl", () => { + it("returns the site's site URL", async () => { + const siteUrl = "prod-url" + mockSitesService.getSiteUrl.mockResolvedValueOnce(siteUrl) + + const resp = await request(app) + .get(`/${mockSiteName}/siteUrl`) + .expect(200) + + expect(resp.body).toStrictEqual({ siteUrl }) + expect(mockSitesService.getSiteUrl).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + }) + }) + + describe("getSiteInfo", () => { + it("returns the site's info", async () => { + const siteInfo = { + savedAt: 12345678, + savedBy: "test@example.com", + publishedAt: 23456789, + publishedBy: "test2@example.com", + stagingUrl: "staging-url", + siteUrl: "prod-url", + } + mockSitesService.getSiteInfo.mockResolvedValueOnce(siteInfo) + + const resp = await request(app).get(`/${mockSiteName}/info`).expect(200) + + expect(resp.body).toStrictEqual(siteInfo) + expect(mockSitesService.getSiteInfo).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + }) + }) +}) diff --git a/src/routes/v2/authenticated/__tests__/collaborators.spec.ts b/src/routes/v2/authenticated/__tests__/collaborators.spec.ts new file mode 100644 index 000000000..ed53caf79 --- /dev/null +++ 
b/src/routes/v2/authenticated/__tests__/collaborators.spec.ts @@ -0,0 +1,229 @@ +import express from "express" +import request from "supertest" + +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import { CollaboratorsRouter } from "@routes/v2/authenticated/collaborators" + +import { generateRouter } from "@fixtures/app" +import { mockSiteName, mockIsomerUserId } from "@fixtures/sessionData" +import { NotFoundError } from "@root/errors/NotFoundError" +import { UnprocessableError } from "@root/errors/UnprocessableError" +import { AuthorizationMiddleware } from "@root/middleware/authorization" +import CollaboratorsService from "@root/services/identity/CollaboratorsService" + +describe("Collaborator Router", () => { + const MOCK_EMAIL = "mockemail" + const MOCK_ACK_VALUE = true + const mockCollaboratorsService = { + create: jest.fn(), + delete: jest.fn(), + list: jest.fn(), + getRole: jest.fn(), + getStatistics: jest.fn(), + } + const mockAuthorizationMiddleware = { + verifySiteAdmin: jest.fn(), + verifySiteMember: jest.fn(), + } + + const collaboratorsRouter = new CollaboratorsRouter({ + collaboratorsService: (mockCollaboratorsService as unknown) as CollaboratorsService, + authorizationMiddleware: (mockAuthorizationMiddleware as unknown) as AuthorizationMiddleware, + }) + + const subrouter = express() + + // We can use read route handler here because we don't need to lock the repo + subrouter.get( + `/:siteName/collaborators/role`, + attachReadRouteHandlerWrapper(collaboratorsRouter.getCollaboratorRole) + ) + subrouter.get( + `/:siteName/collaborators/`, + attachReadRouteHandlerWrapper(collaboratorsRouter.listCollaborators) + ) + subrouter.post( + `/:siteName/collaborators/`, + attachReadRouteHandlerWrapper(collaboratorsRouter.createCollaborator) + ) + subrouter.delete( + `/:siteName/collaborators/:userId`, + attachReadRouteHandlerWrapper(collaboratorsRouter.deleteCollaborator) + ) + subrouter.get( + `/:siteName/collaborators/statistics`, + attachReadRouteHandlerWrapper( + collaboratorsRouter.getCollaboratorsStatistics + ) + ) + + const app = generateRouter(subrouter) + + beforeEach(() => { + jest.clearAllMocks() + }) + + describe("list collaborators", () => { + it("should retrieve the list of collaborators for a site", async () => { + // Arrange + const mockCollaboratorsValue: never[] = [] + const mockCollaboratorsResponse = { + collaborators: mockCollaboratorsValue, + } + mockCollaboratorsService.list.mockResolvedValue(mockCollaboratorsValue) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/collaborators/`) + .expect(200) + + // Assert + expect(resp.body).toStrictEqual(mockCollaboratorsResponse) + expect(mockCollaboratorsService.list).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + }) + }) + + describe("create collaborators", () => { + it("should create a new collaborator", async () => { + // Arrange + const mockRequestBody = { email: MOCK_EMAIL, acknowledge: MOCK_ACK_VALUE } + + // Act + await request(app) + .post(`/${mockSiteName}/collaborators/`) + .send(mockRequestBody) + .expect(200) + + // Assert + expect(mockCollaboratorsService.create).toHaveBeenCalledWith( + mockSiteName, + MOCK_EMAIL, + MOCK_ACK_VALUE + ) + }) + }) + + describe("delete collaborator", () => { + it("should delete collaborator successfully", async () => { + // Arrange + mockCollaboratorsService.delete.mockResolvedValue(1) + + // Act + await request(app) + .delete(`/${mockSiteName}/collaborators/${mockIsomerUserId}`) + .expect(200) + + // Assert + 
expect(mockCollaboratorsService.delete).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + }) + + it("should not delete last admin collaborator", async () => { + // Arrange + mockCollaboratorsService.delete.mockResolvedValue( + new UnprocessableError("") + ) + + // Act + await request(app) + .delete(`/${mockSiteName}/collaborators/${mockIsomerUserId}`) + .expect(422) + + // Assert + expect(mockCollaboratorsService.delete).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + }) + + it("should not delete user if user is not a site collaborator", async () => { + // Arrange + mockCollaboratorsService.delete.mockResolvedValue(new NotFoundError("")) + + // Act + await request(app) + .delete(`/${mockSiteName}/collaborators/${mockIsomerUserId}`) + .expect(404) + + // Assert + expect(mockCollaboratorsService.delete).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + }) + }) + + describe("get collaborator role", () => { + it("should get collaborator role", async () => { + // Arrange + const MOCK_COLLABORATOR_ROLE_VALUE = "role" + const mockGetCollaboratorRoleResponse = { + role: MOCK_COLLABORATOR_ROLE_VALUE, + } + mockCollaboratorsService.getRole.mockResolvedValue( + MOCK_COLLABORATOR_ROLE_VALUE + ) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/collaborators/role`) + .expect(200) + + // Assert + expect(resp.body).toStrictEqual(mockGetCollaboratorRoleResponse) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + }) + }) + + describe("get collaborators statistics", () => { + it("should get collaborators statistics", async () => { + // Arrange + const MOCK_COLLABORATORS_STATISTICS = { + total: 1, + inactive: 1, + } + mockCollaboratorsService.getStatistics.mockResolvedValue( + MOCK_COLLABORATORS_STATISTICS + ) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/collaborators/statistics`) + .expect(200) + + // Assert + expect(resp.body).toStrictEqual(MOCK_COLLABORATORS_STATISTICS) + expect(mockCollaboratorsService.getStatistics).toHaveBeenCalledWith( + mockSiteName + ) + }) + + it("should return 404 if a NotFoundError occurred", async () => { + // Arrange + const mockErrorMessage = "error" + mockCollaboratorsService.getStatistics.mockResolvedValue( + new NotFoundError(mockErrorMessage) + ) + + // Act + const resp = await request(app) + .get(`/${mockSiteName}/collaborators/statistics`) + .expect(404) + + // Assert + expect(resp.body).toStrictEqual({ message: mockErrorMessage }) + expect(mockCollaboratorsService.getStatistics).toHaveBeenCalledWith( + mockSiteName + ) + }) + }) +}) diff --git a/src/routes/v2/authenticated/__tests__/review.spec.ts b/src/routes/v2/authenticated/__tests__/review.spec.ts new file mode 100644 index 000000000..ba3cea684 --- /dev/null +++ b/src/routes/v2/authenticated/__tests__/review.spec.ts @@ -0,0 +1,1324 @@ +import express from "express" +import request from "supertest" + +import RequestNotFoundError from "@errors/RequestNotFoundError" + +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import { ReviewsRouter as _ReviewsRouter } from "@routes/v2/authenticated/review" + +import { generateRouterForDefaultUserWithSite } from "@fixtures/app" +import { mockUserId } from "@fixtures/identity" +import { MOCK_USER_EMAIL_ONE, MOCK_USER_EMAIL_TWO } from "@fixtures/users" +import { CollaboratorRoles, ReviewRequestStatus } from "@root/constants" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import 
NotificationsService from "@services/identity/NotificationsService" +import SitesService from "@services/identity/SitesService" +import UsersService from "@services/identity/UsersService" +import ReviewRequestService from "@services/review/ReviewRequestService" + +describe("Review Requests Router", () => { + const mockReviewRequestService = { + approveReviewRequest: jest.fn(), + closeReviewRequest: jest.fn(), + compareDiff: jest.fn(), + createComment: jest.fn(), + createReviewRequest: jest.fn(), + deleteAllReviewRequestViews: jest.fn(), + deleteReviewRequestApproval: jest.fn(), + getComments: jest.fn(), + getFullReviewRequest: jest.fn(), + getReviewRequest: jest.fn(), + listReviewRequest: jest.fn(), + markAllReviewRequestsAsViewed: jest.fn(), + markReviewRequestAsViewed: jest.fn(), + mergeReviewRequest: jest.fn(), + updateReviewRequest: jest.fn(), + updateReviewRequestLastViewedAt: jest.fn(), + } + + const mockIdentityUsersService = { + findByEmail: jest.fn(), + getSiteMember: jest.fn(), + } + + const mockSitesService = { + getBySiteName: jest.fn(), + } + + const mockCollaboratorsService = { + getRole: jest.fn(), + list: jest.fn(), + } + + const mockNotificationsService = { + create: jest.fn(), + } + + const ReviewsRouter = new _ReviewsRouter( + (mockReviewRequestService as unknown) as ReviewRequestService, + (mockIdentityUsersService as unknown) as UsersService, + (mockSitesService as unknown) as SitesService, + (mockCollaboratorsService as unknown) as CollaboratorsService, + (mockNotificationsService as unknown) as NotificationsService + ) + + const subrouter = express() + // We can use read route handler here because we don't need to lock the repo + subrouter.get( + "/:siteName/review/compare", + attachReadRouteHandlerWrapper(ReviewsRouter.compareDiff) + ) + subrouter.post( + "/:siteName/review/request", + attachReadRouteHandlerWrapper(ReviewsRouter.createReviewRequest) + ) + subrouter.get( + "/:siteName/review/summary", + attachReadRouteHandlerWrapper(ReviewsRouter.listReviews) + ) + subrouter.post( + "/:siteName/review/viewed", + attachReadRouteHandlerWrapper(ReviewsRouter.markAllReviewRequestsAsViewed) + ) + subrouter.get( + "/:siteName/review/:requestId", + attachReadRouteHandlerWrapper(ReviewsRouter.getReviewRequest) + ) + subrouter.post( + "/:siteName/review/:requestId/viewed", + attachReadRouteHandlerWrapper(ReviewsRouter.markReviewRequestAsViewed) + ) + subrouter.post( + "/:siteName/review/:requestId/merge", + attachReadRouteHandlerWrapper(ReviewsRouter.mergeReviewRequest) + ) + subrouter.post( + "/:siteName/review/:requestId/approve", + attachReadRouteHandlerWrapper(ReviewsRouter.approveReviewRequest) + ) + subrouter.get( + "/:siteName/review/:requestId/comments", + attachReadRouteHandlerWrapper(ReviewsRouter.getComments) + ) + subrouter.post( + "/:siteName/review/:requestId/comments", + attachReadRouteHandlerWrapper(ReviewsRouter.createComment) + ) + subrouter.delete( + "/:siteName/review/:requestId/approve", + attachReadRouteHandlerWrapper(ReviewsRouter.deleteReviewRequestApproval) + ) + subrouter.post( + "/:siteName/review/:requestId/comments/viewedComments", + attachReadRouteHandlerWrapper( + ReviewsRouter.markReviewRequestCommentsAsViewed + ) + ) + subrouter.post( + "/:siteName/review/:requestId", + attachReadRouteHandlerWrapper(ReviewsRouter.updateReviewRequest) + ) + subrouter.delete( + "/:siteName/review/:requestId", + attachReadRouteHandlerWrapper(ReviewsRouter.closeReviewRequest) + ) + + const app = generateRouterForDefaultUserWithSite(subrouter) + + beforeEach(() => 
{ + jest.clearAllMocks() + }) + + describe("compareDiff", () => { + it("should return 200 with the list of changed files", async () => { + // Arrange + const mockFilesChanged = ["file1", "file2"] + mockIdentityUsersService.getSiteMember.mockResolvedValueOnce("user") + mockReviewRequestService.compareDiff.mockResolvedValueOnce( + mockFilesChanged + ) + + // Act + const response = await request(app).get("/mockSite/review/compare") + + // Assert + expect(response.status).toEqual(200) + expect(response.body).toEqual({ items: mockFilesChanged }) + expect(mockIdentityUsersService.getSiteMember).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.compareDiff).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if user is not a site member", async () => { + // Arrange + mockIdentityUsersService.getSiteMember.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get("/mockSite/review/compare") + + // Assert + expect(response.status).toEqual(404) + expect(mockIdentityUsersService.getSiteMember).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.compareDiff).not.toHaveBeenCalled() + }) + }) + + describe("createReviewRequest", () => { + it("should return 200 with the pull request number of the created review request", async () => { + // Arrange + const mockPullRequestNumber = 1 + const mockReviewer = "reviewer@test.gov.sg" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockIdentityUsersService.findByEmail.mockResolvedValueOnce("user") + mockCollaboratorsService.list.mockResolvedValueOnce([ + { + email: mockReviewer, + SiteMember: { + role: CollaboratorRoles.Admin, + }, + id: mockUserId, + }, + ]) + mockReviewRequestService.createReviewRequest.mockResolvedValueOnce( + mockPullRequestNumber + ) + mockNotificationsService.create.mockResolvedValueOnce([]) + + // Act + const response = await request(app) + .post("/mockSite/review/request") + .send({ + reviewers: [mockReviewer], + title: "mockTitle", + description: "mockDescription", + }) + + // Assert + expect(response.status).toEqual(200) + expect(response.body).toEqual({ + pullRequestNumber: mockPullRequestNumber, + }) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockIdentityUsersService.findByEmail).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.createReviewRequest + ).toHaveBeenCalledTimes(1) + expect(mockNotificationsService.create).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app) + .post("/mockSite/review/request") + .send({}) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockIdentityUsersService.findByEmail).not.toHaveBeenCalled() + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.createReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site collaborator", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + 
mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app) + .post("/mockSite/review/request") + .send({}) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockIdentityUsersService.findByEmail).not.toHaveBeenCalled() + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.createReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 400 if no reviewers are provided", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockIdentityUsersService.findByEmail.mockResolvedValueOnce("user") + + // Act + const response = await request(app) + .post("/mockSite/review/request") + .send({ + reviewers: [], + title: "mockTitle", + description: "mockDescription", + }) + + // Assert + expect(response.status).toEqual(400) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockIdentityUsersService.findByEmail).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.createReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 400 if provided reviewer is not an admin", async () => { + // Arrange + const mockReviewer = "reviewer@test.gov.sg" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockIdentityUsersService.findByEmail.mockResolvedValueOnce("user") + mockCollaboratorsService.list.mockResolvedValueOnce([]) + + // Act + const response = await request(app) + .post("/mockSite/review/request") + .send({ + reviewers: [mockReviewer], + title: "mockTitle", + description: "mockDescription", + }) + + // Assert + expect(response.status).toEqual(400) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockIdentityUsersService.findByEmail).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.createReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + }) + + describe("listReviews", () => { + it("should return 200 with the list of reviews", async () => { + // Arrange + const mockReviews = ["review1", "review2"] + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.listReviewRequest.mockResolvedValueOnce( + mockReviews + ) + + // Act + const response = await request(app).get("/mockSite/review/summary") + + // Assert + expect(response.status).toEqual(200) + expect(response.body).toEqual({ reviews: mockReviews }) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.listReviewRequest).toHaveBeenCalledTimes( + 1 + ) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) 
+ + // Act + const response = await request(app).get("/mockSite/review/summary") + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.listReviewRequest).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site collaborator", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get("/mockSite/review/summary") + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.listReviewRequest).not.toHaveBeenCalled() + }) + }) + + describe("markAllReviewRequestsAsViewed", () => { + it("should return 200 and mark all review requests as viewed", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + + // Act + const response = await request(app).post("/mockSite/review/viewed") + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.markAllReviewRequestsAsViewed + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post("/mockSite/review/viewed") + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect( + mockReviewRequestService.markAllReviewRequestsAsViewed + ).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site collaborator", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post("/mockSite/review/viewed") + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.markAllReviewRequestsAsViewed + ).not.toHaveBeenCalled() + }) + }) + + describe("markReviewRequestAsViewed", () => { + it("should return 200 and mark review request as viewed", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce({ + id: 12345, + }) + + // Act + const response = await request(app).post(`/mockSite/review/12345/viewed`) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.markReviewRequestAsViewed + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + 
mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345/viewed`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.markReviewRequestAsViewed + ).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site collaborator", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345/viewed`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.markReviewRequestAsViewed + ).not.toHaveBeenCalled() + }) + + it("should return 404 if review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345/viewed`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.markReviewRequestAsViewed + ).not.toHaveBeenCalled() + }) + }) + + describe("getReviewRequest", () => { + it("should return 200 with the full review request", async () => { + // Arrange + const mockReviewRequest = "review request" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getFullReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app).get(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(200) + expect(response.body).toEqual({ reviewRequest: mockReviewRequest }) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.getFullReviewRequest + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect( + mockReviewRequestService.getFullReviewRequest + ).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site collaborator", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get(`/mockSite/review/12345`) + + // Assert + 
expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.getFullReviewRequest + ).not.toHaveBeenCalled() + }) + + it("should return 404 if review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getFullReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).get(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.getFullReviewRequest + ).toHaveBeenCalledTimes(1) + }) + }) + + describe("updateReviewRequest", () => { + it("should return 200 with the updated review request", async () => { + // Arrange + const mockReviewRequest = { requestor: { email: MOCK_USER_EMAIL_ONE } } + const mockReviewer = "reviewer@test.gov.sg" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + mockCollaboratorsService.list.mockResolvedValueOnce([ + { + email: mockReviewer, + SiteMember: { + role: CollaboratorRoles.Admin, + }, + id: mockUserId, + }, + ]) + mockReviewRequestService.updateReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345`) + .send({ + reviewers: [mockReviewer], + }) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.updateReviewRequest + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.updateReviewRequest + ).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request is not found", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.updateReviewRequest + ).not.toHaveBeenCalled() + }) + + it("should return 403 if user is not the original requestor", async () => { + // Arrange + const mockReviewRequest = { requestor: { email: "other@test.gov.sg" } } + 
mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(403) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).not.toHaveBeenCalled() + expect( + mockReviewRequestService.updateReviewRequest + ).not.toHaveBeenCalled() + }) + + it("should return 400 if the given reviewers are not admins of the site", async () => { + // Arrange + const mockReviewRequest = { requestor: { email: MOCK_USER_EMAIL_ONE } } + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + mockCollaboratorsService.list.mockResolvedValueOnce([]) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345`) + .send({ + reviewers: [MOCK_USER_EMAIL_TWO], + }) + + // Assert + expect(response.status).toEqual(400) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.list).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.updateReviewRequest + ).not.toHaveBeenCalled() + }) + }) + + describe("mergeReviewRequest", () => { + it("should return 200 with the review request successfully merged", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + "review request" + ) + mockReviewRequestService.mergeReviewRequest.mockResolvedValueOnce( + undefined + ) + mockReviewRequestService.deleteAllReviewRequestViews.mockResolvedValueOnce( + undefined + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345/merge`) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.mergeReviewRequest).toHaveBeenCalledTimes( + 1 + ) + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345/merge`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.mergeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a site member", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345/merge`) + + // Assert + expect(response.status).toEqual(404) + 
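For orientation, the specs in this file drive an Express app through supertest, with every service replaced by a bag of jest mocks and the authentication middleware stubbed out so that session data is always present. A minimal sketch of that kind of harness follows; the import path, mock shapes and session object are illustrative stand-ins, not the actual setup at the top of this spec file.

import express from "express"
import request from "supertest"

import { ReviewsRouter } from "@root/routes/v2/authenticated/review"

// Plain bags of jest mocks; each spec resolves only the methods it needs.
const mockReviewRequestService = {
  getReviewRequest: jest.fn(),
  markReviewRequestAsViewed: jest.fn(),
} as any
const mockUsersService = { findByEmail: jest.fn(), getSiteMember: jest.fn() } as any
const mockSitesService = { getBySiteName: jest.fn() } as any
const mockCollaboratorsService = { getRole: jest.fn(), list: jest.fn() } as any
const mockNotificationsService = { create: jest.fn() } as any

const reviewsRouter = new ReviewsRouter(
  mockReviewRequestService,
  mockUsersService,
  mockSitesService,
  mockCollaboratorsService,
  mockNotificationsService
)

const app = express()
app.use(express.json())
// Stand-in for the real authentication middleware: attach fake session data.
app.use((req, res, next) => {
  res.locals.userWithSiteSessionData = {
    isomerUserId: "1",
    email: "user@example.gov.sg",
  }
  next()
})
app.use("/:siteName/review", reviewsRouter.getRouter())

// A spec then follows the same Arrange/Act/Assert shape used throughout:
it("returns 404 when the site does not exist", async () => {
  mockSitesService.getBySiteName.mockResolvedValueOnce(null)
  const response = await request(app).post("/mockSite/review/12345/viewed")
  expect(response.status).toEqual(404)
})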
expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.mergeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345/merge`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.mergeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + }) + }) + + describe("approveReviewRequest", () => { + it("should return 200 with the review request successfully marked as approved", async () => { + // Arrange + const mockReviewRequest = { + reviewers: [{ email: MOCK_USER_EMAIL_ONE }], + reviewStatus: ReviewRequestStatus.Open, + requestor: MOCK_USER_EMAIL_TWO, + } + const mockReviewer = "reviewer@test.gov.sg" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + mockReviewRequestService.approveReviewRequest.mockResolvedValueOnce( + undefined + ) + mockCollaboratorsService.list.mockResolvedValueOnce([ + { + email: mockReviewer, + SiteMember: { + role: CollaboratorRoles.Admin, + }, + id: mockUserId, + }, + ]) + + // Act + const response = await request(app).post(`/mockSite/review/12345/approve`) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.approveReviewRequest + ).toHaveBeenCalledTimes(1) + expect(mockNotificationsService.create).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post(`/mockSite/review/12345/approve`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.approveReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345/approve`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) 
+ expect( + mockReviewRequestService.approveReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 403 if the user is not a reviewer", async () => { + // Arrange + const mockReviewRequest = { + reviewers: [{ email: "other@test.gov.sg" }], + reviewStatus: ReviewRequestStatus.Open, + requestor: MOCK_USER_EMAIL_TWO, + } + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app).post(`/mockSite/review/12345/approve`) + + // Assert + expect(response.status).toEqual(403) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.approveReviewRequest + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + }) + + describe("getComments", () => { + it("should return 200 with the comments for a review request", async () => { + // Arrange + const mockComments = ["comment1", "comment2"] + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + "review request" + ) + mockReviewRequestService.getComments.mockResolvedValueOnce(mockComments) + + // Act + const response = await request(app).get(`/mockSite/review/12345/comments`) + + // Assert + expect(response.status).toEqual(200) + expect(response.body).toEqual(mockComments) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getComments).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get(`/mockSite/review/12345/comments`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.getComments).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).get(`/mockSite/review/12345/comments`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.getComments).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).get(`/mockSite/review/12345/comments`) 
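One detail worth noting in these specs: the mocks resolve with a RequestNotFoundError instead of rejecting. The handlers in review.ts (added later in this patch) treat service errors as return values and branch on them with the isIsomerError guard, which is why the specs expect a clean 404 response rather than an unhandled rejection. A rough sketch of that convention, assuming isIsomerError is a simple structural type guard (the real one lives in @root/types and may differ):

// Services resolve with either data or an Isomer error object.
interface IsomerErrorLike {
  name: string
  status: number
  message: string
}

const isIsomerError = (value: unknown): value is IsomerErrorLike =>
  typeof value === "object" &&
  value !== null &&
  "status" in value &&
  "message" in value

// Handlers branch on the result instead of relying on thrown exceptions:
const result: IsomerErrorLike | { id: number } = {
  name: "RequestNotFoundError",
  status: 404,
  message: "Please ensure that the review request exists!",
}
if (isIsomerError(result)) {
  console.log(`respond with ${result.status}: ${result.message}`)
}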
+ + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getComments).not.toHaveBeenCalled() + }) + }) + + describe("createComment", () => { + it("should return 200 with the comment created successfully", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + "review request" + ) + mockReviewRequestService.createComment.mockResolvedValueOnce(undefined) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345/comments`) + .send({ message: "comment" }) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.createComment).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345/comments`) + .send({ message: "comment" }) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.createComment).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345/comments`) + .send({ message: "comment" }) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.createComment).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app) + .post(`/mockSite/review/12345/comments`) + .send({ message: "comment" }) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.createComment).not.toHaveBeenCalled() + }) + }) + + describe("markReviewRequestCommentsAsViewed", () => { + it("should return 200 with the lastViewedAt timestamp updated successfully", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + 
mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + "review request" + ) + mockReviewRequestService.updateReviewRequestLastViewedAt.mockResolvedValueOnce( + undefined + ) + + // Act + const response = await request(app).post( + `/mockSite/review/12345/comments/viewedComments` + ) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.updateReviewRequestLastViewedAt + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post( + `/mockSite/review/12345/comments/viewedComments` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).not.toHaveBeenCalled() + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.updateReviewRequestLastViewedAt + ).not.toHaveBeenCalled() + }) + + it("should return 404 if user is not a valid site member", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce(null) + + // Act + const response = await request(app).post( + `/mockSite/review/12345/comments/viewedComments` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.updateReviewRequestLastViewedAt + ).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockCollaboratorsService.getRole.mockResolvedValueOnce("role") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).post( + `/mockSite/review/12345/comments/viewedComments` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.updateReviewRequestLastViewedAt + ).not.toHaveBeenCalled() + }) + }) + + describe("closeReviewRequest", () => { + it("should return 200 with the review request closed successfully", async () => { + // Arrange + const mockReviewRequest = { requestor: { email: MOCK_USER_EMAIL_ONE } } + const mockReviewer = "reviewer@test.gov.sg" + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + mockReviewRequestService.closeReviewRequest.mockResolvedValueOnce( + undefined + ) + mockReviewRequestService.deleteAllReviewRequestViews.mockResolvedValueOnce( + undefined + ) + mockCollaboratorsService.list.mockResolvedValueOnce([ + { + email: mockReviewer, + SiteMember: { + role: 
CollaboratorRoles.Admin, + }, + id: mockUserId, + }, + ]) + + // Act + const response = await request(app).delete(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(200) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.closeReviewRequest).toHaveBeenCalledTimes( + 1 + ) + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).toHaveBeenCalledTimes(1) + expect(mockNotificationsService.create).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).delete(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect(mockReviewRequestService.closeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).delete(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.closeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + + it("should return 404 if the user is not the requestor of the review request", async () => { + // Arrange + const mockReviewRequest = { requestor: { email: "other@test.gov.sg" } } + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app).delete(`/mockSite/review/12345`) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.closeReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteAllReviewRequestViews + ).not.toHaveBeenCalled() + expect(mockNotificationsService.create).not.toHaveBeenCalled() + }) + }) + + describe("deleteReviewRequestApproval", () => { + it("should return 200 with the review request approval deleted successfully", async () => { + // Arrange + const mockReviewRequest = { reviewers: [{ email: MOCK_USER_EMAIL_ONE }] } + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + mockReviewRequestService.deleteReviewRequestApproval.mockResolvedValueOnce( + undefined + ) + + // Act + const response = await request(app).delete( + `/mockSite/review/12345/approve` + ) + + // Assert + expect(response.status).toEqual(200) + 
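Taken together, the specs in this file pin down the URL surface of the new review router, all relative to its mount point under /sites/:siteName/review (see index.js below). The following registration sketch is consistent with those paths and is intended purely as a reading aid: the actual getRouter() of ReviewsRouter appears later in this patch, registers further site-level routes, and presumably wraps each handler with the attachRead/WriteRouteHandlerWrapper helpers it imports. Method names follow the describe blocks above.

import express from "express"

import { ReviewsRouter } from "@root/routes/v2/authenticated/review"

export const buildReviewRoutes = (reviews: ReviewsRouter) => {
  const router = express.Router({ mergeParams: true })
  // The cast stands in for the patch's route handler wrappers, elided here.
  const h = (handler: unknown) => handler as express.RequestHandler

  router.get("/:requestId", h(reviews.getReviewRequest))
  router.post("/:requestId", h(reviews.updateReviewRequest))
  router.delete("/:requestId", h(reviews.closeReviewRequest))
  router.post("/:requestId/viewed", h(reviews.markReviewRequestAsViewed))
  router.post("/:requestId/merge", h(reviews.mergeReviewRequest))
  router.post("/:requestId/approve", h(reviews.approveReviewRequest))
  router.delete("/:requestId/approve", h(reviews.deleteReviewRequestApproval))
  router.get("/:requestId/comments", h(reviews.getComments))
  router.post("/:requestId/comments", h(reviews.createComment))
  router.post(
    "/:requestId/comments/viewedComments",
    h(reviews.markReviewRequestCommentsAsViewed)
  )

  return router
}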
expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.deleteReviewRequestApproval + ).toHaveBeenCalledTimes(1) + }) + + it("should return 404 if the site does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce(null) + + // Act + const response = await request(app).delete( + `/mockSite/review/12345/approve` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).not.toHaveBeenCalled() + expect( + mockReviewRequestService.deleteReviewRequestApproval + ).not.toHaveBeenCalled() + }) + + it("should return 404 if the review request does not exist", async () => { + // Arrange + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const response = await request(app).delete( + `/mockSite/review/12345/approve` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.deleteReviewRequestApproval + ).not.toHaveBeenCalled() + }) + + it("should return 404 if the user is not a reviewer of the review request", async () => { + // Arrange + const mockReviewRequest = { reviewers: [{ email: "other@test.gov.sg" }] } + mockSitesService.getBySiteName.mockResolvedValueOnce("site") + mockReviewRequestService.getReviewRequest.mockResolvedValueOnce( + mockReviewRequest + ) + + // Act + const response = await request(app).delete( + `/mockSite/review/12345/approve` + ) + + // Assert + expect(response.status).toEqual(404) + expect(mockSitesService.getBySiteName).toHaveBeenCalledTimes(1) + expect(mockReviewRequestService.getReviewRequest).toHaveBeenCalledTimes(1) + expect( + mockReviewRequestService.deleteReviewRequestApproval + ).not.toHaveBeenCalled() + }) + }) +}) diff --git a/src/routes/v2/authenticated/collaborators.ts b/src/routes/v2/authenticated/collaborators.ts new file mode 100644 index 000000000..4d278ea33 --- /dev/null +++ b/src/routes/v2/authenticated/collaborators.ts @@ -0,0 +1,169 @@ +import autoBind from "auto-bind" +import express from "express" +import _ from "lodash" + +import { AuthorizationMiddleware } from "@middleware/authorization" +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { BaseIsomerError } from "@root/errors/BaseError" +import { attachSiteHandler } from "@root/middleware" +import { RequestHandler } from "@root/types" +import { UserDto } from "@root/types/dto/review" +import CollaboratorsService from "@services/identity/CollaboratorsService" + +interface CollaboratorsRouterProps { + collaboratorsService: CollaboratorsService + authorizationMiddleware: AuthorizationMiddleware +} + +// eslint-disable-next-line import/prefer-default-export +export class CollaboratorsRouter { + private readonly collaboratorsService + + private readonly authorizationMiddleware + + constructor({ + collaboratorsService, + authorizationMiddleware, + }: CollaboratorsRouterProps) { + this.collaboratorsService = collaboratorsService + this.authorizationMiddleware = authorizationMiddleware + autoBind(this) + } + + createCollaborator: 
RequestHandler< + never, + unknown, + { email: string; acknowledge?: boolean }, + { siteName: string }, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { email, acknowledge = false } = req.body + const { siteName } = req.params + const resp = await this.collaboratorsService.create( + siteName, + email, + acknowledge + ) + + // Check for error and throw + if (resp instanceof BaseIsomerError) { + throw resp + } + return res.sendStatus(200) + } + + deleteCollaborator: RequestHandler< + never, + unknown, + never, + { siteName: string; userId: string }, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName, userId } = req.params + const resp = await this.collaboratorsService.delete(siteName, userId) + + // Check for error and throw + if (resp instanceof BaseIsomerError) { + throw resp + } + return res.sendStatus(200) + } + + listCollaborators: RequestHandler< + { siteName: string }, + { collaborators: UserDto[] }, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName } = req.params + const { userWithSiteSessionData } = res.locals + const rawCollaborators = await this.collaboratorsService.list( + siteName, + userWithSiteSessionData.isomerUserId + ) + const collaborators: UserDto[] = rawCollaborators.map((collaborator) => ({ + ..._.omit(collaborator.toJSON(), "SiteMember"), + email: collaborator.email || "", + role: collaborator.SiteMember.role, + })) + + return res.status(200).json({ collaborators }) + } + + getCollaboratorRole: RequestHandler< + never, + unknown, + never, + { siteName: string }, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName } = req.params + const { userWithSiteSessionData } = res.locals + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + return res.status(200).json({ role }) + } + + getCollaboratorsStatistics: RequestHandler< + { siteName: string }, + unknown, + never, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName } = req.params + const statistics = await this.collaboratorsService.getStatistics(siteName) + + // Check for error and throw + if (statistics instanceof BaseIsomerError) { + return res.status(404).json({ message: statistics.message }) + } + return res.status(200).json(statistics) + } + + getRouter() { + const router = express.Router({ mergeParams: true }) + router.use( + attachSiteHandler, + this.authorizationMiddleware.verifyIsEmailUser + ) + router.get( + "/role", + attachSiteHandler, + this.authorizationMiddleware.verifySiteMember, + attachReadRouteHandlerWrapper(this.getCollaboratorRole) + ) + router.get( + "/", + attachSiteHandler, + this.authorizationMiddleware.verifySiteMember, + attachReadRouteHandlerWrapper(this.listCollaborators) + ) + router.post( + "/", + attachSiteHandler, + this.authorizationMiddleware.verifySiteAdmin, + attachReadRouteHandlerWrapper(this.createCollaborator) + ) + router.delete( + "/:userId", + attachSiteHandler, + this.authorizationMiddleware.verifySiteAdmin, + attachReadRouteHandlerWrapper(this.deleteCollaborator) + ) + router.get( + "/statistics", + attachSiteHandler, + this.authorizationMiddleware.verifySiteMember, + attachReadRouteHandlerWrapper(this.getCollaboratorsStatistics) + ) + + return router + } +} diff --git a/src/routes/v2/authenticated/index.js b/src/routes/v2/authenticated/index.js 
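For a sense of how the CollaboratorsRouter above is consumed: index.js below mounts it at /sites/:siteName/collaborators on the authenticated subrouter, so a logged-in client talks to it roughly as follows. The base URL, site name and ids are placeholders, and the surrounding API prefix and session cookie handling are outside this hunk.

import axios from "axios"

// Placeholder client; in practice the CMS frontend calls these endpoints
// with the user's session cookie attached.
const api = axios.create({ baseURL: "https://cms.example.com" })
const base = "/sites/my-site/collaborators"

const demo = async () => {
  // Site members: list collaborators, check own role, fetch statistics
  await api.get(base)
  await api.get(`${base}/role`)
  await api.get(`${base}/statistics`)

  // Site admins only: add a collaborator (acknowledge defaults to false)
  await api.post(base, { email: "new.editor@agency.gov.sg", acknowledge: true })

  // Site admins only: remove a collaborator by user id
  await api.delete(`${base}/1`)
}

demo().catch(console.error)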
index fb042d709..287731070 100644 --- a/src/routes/v2/authenticated/index.js +++ b/src/routes/v2/authenticated/index.js @@ -1,36 +1,68 @@ +import { attachSiteHandler } from "@root/middleware" + +import { NotificationsRouter } from "./notifications" + const express = require("express") const { NetlifyTomlService, } = require("@services/configServices/NetlifyTomlService") -const { SitesService } = require("@services/utilServices/SitesService") +const { CollaboratorsRouter } = require("./collaborators") const { NetlifyTomlRouter } = require("./netlifyToml") const { SitesRouter } = require("./sites") const { UsersRouter } = require("./users") const getAuthenticatedSubrouter = ({ - authMiddleware, - gitHubService, - configYmlService, + authenticationMiddleware, + sitesService, usersService, apiLogger, + isomerAdminsService, + collaboratorsService, + authorizationMiddleware, + reviewRouter, + notificationsService, }) => { - const sitesService = new SitesService({ gitHubService, configYmlService }) const netlifyTomlService = new NetlifyTomlService() - const sitesV2Router = new SitesRouter({ sitesService }) + const sitesV2Router = new SitesRouter({ + sitesService, + authorizationMiddleware, + }) + const collaboratorsRouter = new CollaboratorsRouter({ + collaboratorsService, + authorizationMiddleware, + }) const usersRouter = new UsersRouter({ usersService }) const netlifyTomlV2Router = new NetlifyTomlRouter({ netlifyTomlService }) + const notificationsRouter = new NotificationsRouter({ + authorizationMiddleware, + notificationsService, + }) const authenticatedSubrouter = express.Router({ mergeParams: true }) - authenticatedSubrouter.use(authMiddleware.verifyJwt) + authenticatedSubrouter.use(authenticationMiddleware.verifyAccess) // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username // which is only available after verifying that the jwt is valid authenticatedSubrouter.use(apiLogger) - authenticatedSubrouter.use("/sites", sitesV2Router.getRouter()) + authenticatedSubrouter.use( + "/sites/:siteName/collaborators", + collaboratorsRouter.getRouter() + ) + const baseSitesV2Router = sitesV2Router.getRouter() + const sitesRouterWithReviewRequest = baseSitesV2Router.use( + "/:siteName/review", + attachSiteHandler, + reviewRouter.getRouter() + ) + authenticatedSubrouter.use("/sites", sitesRouterWithReviewRequest) + authenticatedSubrouter.use( + "/sites/:siteName/notifications", + notificationsRouter.getRouter() + ) authenticatedSubrouter.use("/user", usersRouter.getRouter()) authenticatedSubrouter.use("/netlify-toml", netlifyTomlV2Router.getRouter()) diff --git a/src/routes/v2/authenticated/netlifyToml.js b/src/routes/v2/authenticated/netlifyToml.js index 65d3a5adb..a65e6ce33 100644 --- a/src/routes/v2/authenticated/netlifyToml.js +++ b/src/routes/v2/authenticated/netlifyToml.js @@ -12,11 +12,11 @@ class NetlifyTomlRouter { // Read netlify.toml file async readNetlifyToml(req, res) { - const { accessToken } = res.locals + const { userSessionData } = res.locals - const netlifyTomlHeaderValues = await this.netlifyTomlService.read({ - accessToken, - }) + const netlifyTomlHeaderValues = await this.netlifyTomlService.read( + userSessionData + ) return res.status(200).json({ netlifyTomlHeaderValues }) } diff --git a/src/routes/v2/authenticated/notifications.ts b/src/routes/v2/authenticated/notifications.ts new file mode 100644 index 000000000..832d54d4c --- /dev/null +++ b/src/routes/v2/authenticated/notifications.ts @@ -0,0 +1,105 @@ +import autoBind from "auto-bind" +import express 
from "express" + +import { + attachReadRouteHandlerWrapper, + attachWriteRouteHandlerWrapper, +} from "@middleware/routeHandler" + +import UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" +import { attachSiteHandler } from "@root/middleware" +import { AuthorizationMiddleware } from "@root/middleware/authorization" +import { RequestHandler } from "@root/types" +import NotificationsService, { + NotificationResponse, +} from "@services/identity/NotificationsService" + +interface NotificationsRouterProps { + notificationsService: NotificationsService + authorizationMiddleware: AuthorizationMiddleware +} + +// eslint-disable-next-line import/prefer-default-export +export class NotificationsRouter { + private readonly notificationsService + + private readonly authorizationMiddleware + + constructor({ + notificationsService, + authorizationMiddleware, + }: NotificationsRouterProps) { + this.notificationsService = notificationsService + this.authorizationMiddleware = authorizationMiddleware + autoBind(this) + } + + getRecentNotifications: RequestHandler< + never, + NotificationResponse[], + unknown, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + const { siteName, isomerUserId: userId } = userWithSiteSessionData + + const notifications = await this.notificationsService.listRecent({ + siteName, + userId, + }) + return res.status(200).json(notifications) + } + + getAllNotifications: RequestHandler< + never, + NotificationResponse[], + unknown, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + const { siteName, isomerUserId: userId } = userWithSiteSessionData + + const notifications = await this.notificationsService.listAll({ + siteName, + userId, + }) + return res.status(200).json(notifications) + } + + markNotificationsAsRead: RequestHandler< + never, + unknown, + unknown, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + const { siteName, isomerUserId: userId } = userWithSiteSessionData + + await this.notificationsService.markNotificationsAsRead({ + siteName, + userId, + }) + return res.status(200).send("OK") + } + + getRouter() { + const router = express.Router({ mergeParams: true }) + router.use(attachSiteHandler) + router.use(this.authorizationMiddleware.verifySiteMember) + + router.get("/", attachReadRouteHandlerWrapper(this.getRecentNotifications)) + router.get( + "/allNotifications", + attachReadRouteHandlerWrapper(this.getAllNotifications) + ) + router.post( + "/", + attachWriteRouteHandlerWrapper(this.markNotificationsAsRead) + ) + + return router + } +} diff --git a/src/routes/v2/authenticated/review.ts b/src/routes/v2/authenticated/review.ts new file mode 100644 index 000000000..ab69a363a --- /dev/null +++ b/src/routes/v2/authenticated/review.ts @@ -0,0 +1,1297 @@ +import autoBind from "auto-bind" +import express from "express" +import _ from "lodash" + +import logger from "@logger/logger" + +import { + attachReadRouteHandlerWrapper, + attachWriteRouteHandlerWrapper, +} from "@middleware/routeHandler" + +import UserSessionData from "@classes/UserSessionData" +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { CollaboratorRoles, ReviewRequestStatus } from "@root/constants" +import { SiteMember, User } from "@root/database/models" +import 
CollaboratorsService from "@root/services/identity/CollaboratorsService" +import NotificationsService from "@root/services/identity/NotificationsService" +import SitesService from "@root/services/identity/SitesService" +import UsersService from "@root/services/identity/UsersService" +import { isIsomerError, RequestHandler } from "@root/types" +import { ResponseErrorBody } from "@root/types/dto/error" +import { + CommentItem, + DashboardReviewRequestDto, + EditedItemDto, + UpdateReviewRequestDto, + ReviewRequestDto, + BlobDiffDto, +} from "@root/types/dto/review" +import ReviewRequestService from "@services/review/ReviewRequestService" +// eslint-disable-next-line import/prefer-default-export +export class ReviewsRouter { + private readonly reviewRequestService + + private readonly identityUsersService + + private readonly sitesService + + private readonly collaboratorsService + + private readonly notificationsService + + constructor( + reviewRequestService: ReviewRequestService, + identityUsersService: UsersService, + sitesService: SitesService, + collaboratorsService: CollaboratorsService, + notificationsService: NotificationsService + ) { + this.reviewRequestService = reviewRequestService + this.identityUsersService = identityUsersService + this.sitesService = sitesService + this.collaboratorsService = collaboratorsService + this.notificationsService = notificationsService + + autoBind(this) + } + + compareDiff: RequestHandler< + { siteName: string }, + { items: EditedItemDto[] }, + unknown, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that user exists. + // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + const { userWithSiteSessionData } = res.locals + const { siteName } = req.params + + // Check if they have access to site + const possibleSiteMember = await this.identityUsersService.getSiteMember( + userWithSiteSessionData.isomerUserId, + siteName + ) + + if (!possibleSiteMember) { + return res.status(404).send() + } + + const files = await this.reviewRequestService.compareDiff( + userWithSiteSessionData + ) + + return res.status(200).json({ items: files }) + } + + createReviewRequest: RequestHandler< + { siteName: string }, + { pullRequestNumber: number } | ResponseErrorBody, + { reviewers: string[]; title: string; description: string }, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName } = req.params + const site = await this.sitesService.getBySiteName(siteName) + const { userWithSiteSessionData } = res.locals + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "createReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: + "Please ensure that the site you are requesting a review for exists!", + }) + } + + // Step 2: Check that user exists. 
+ // Having session data is proof that this user exists
+ // as otherwise, they would be rejected by our middleware
+ // Check if they are a site admin
+ const role = await this.collaboratorsService.getRole(
+ siteName,
+ userWithSiteSessionData.isomerUserId
+ )
+
+ if (!role) {
+ logger.error({
+ message:
+ "User attempted to create review request with invalid permissions",
+ method: "createReviewRequest",
+ meta: {
+ userId: userWithSiteSessionData.isomerUserId,
+ email: userWithSiteSessionData.email,
+ siteName,
+ },
+ })
+ return res.status(404).send({
+ message: "Only site members can request reviews!",
+ })
+ }
+
+ const admin = await this.identityUsersService.findByEmail(
+ userWithSiteSessionData.email
+ )
+ const { reviewers, title, description } = req.body
+
+ // Step 3: Check if reviewers are admins of repo
+ // Check if number of requested reviewers > 0
+ if (reviewers.length === 0) {
+ return res.status(400).json({
+ message: "Please ensure that you have selected at least 1 reviewer!",
+ })
+ }
+ const reviewersMap: Record<string, boolean> = {}
+
+ // May we repent for writing such code in production.
+ reviewers.forEach((email) => {
+ reviewersMap[email] = true
+ })
+
+ const collaborators = await this.collaboratorsService.list(
+ siteName,
+ userWithSiteSessionData.isomerUserId
+ )
+
+ // Filter to get admins,
+ // then ensure that they have been requested for review
+ const admins = collaborators
+ .filter(
+ (collaborator) =>
+ collaborator.SiteMember.role === CollaboratorRoles.Admin
+ )
+ .filter((collaborator) => reviewersMap[collaborator.email || ""])
+
+ const areAllReviewersAdmin = admins.length === reviewers.length
+ if (!areAllReviewersAdmin) {
+ return res.status(400).send({
+ message: "Please ensure that all requested reviewers are admins!",
+ })
+ }
+
+ // Step 4: Create RR
+ const pullRequestNumber = await this.reviewRequestService.createReviewRequest(
+ userWithSiteSessionData,
+ admins,
+ // NOTE: Safe assertion as we first retrieve the role
+ // and assert that the user is an admin of said site.
+ // This guarantees that the user exists in our database.
+ admin!,
+ site,
+ title,
+ description
+ )
+
+ // Step 5: Create notifications
+ await Promise.all(
+ collaborators.map(async (user: User & { SiteMember: SiteMember }) => {
+ // Don't send notification to self
+ if (user.id.toString() === userWithSiteSessionData.isomerUserId) return
+ const notificationType = reviewersMap[user.email || ""]
+ ? "sent_request"
+ : "request_created"
+ await this.notificationsService.create({
+ siteMember: user.SiteMember,
+ link: `/sites/${siteName}/review/${pullRequestNumber}`,
+ notificationType,
+ notificationSourceUsername: userWithSiteSessionData.email,
+ })
+ })
+ )
+
+ return res.status(200).send({
+ pullRequestNumber,
+ })
+ }
+
+ listReviews: RequestHandler<
+ { siteName: string },
+ { reviews: DashboardReviewRequestDto[] } | ResponseErrorBody,
+ never,
+ unknown,
+ { userWithSiteSessionData: UserWithSiteSessionData }
+ > = async (req, res) => {
+ // Step 1: Check that the site exists
+ const { siteName } = req.params
+ const site = await this.sitesService.getBySiteName(siteName)
+ const { userWithSiteSessionData } = res.locals
+
+ if (!site) {
+ logger.error({
+ message: "Invalid site requested",
+ method: "listReviews",
+ meta: {
+ userId: userWithSiteSessionData.isomerUserId,
+ email: userWithSiteSessionData.email,
+ siteName,
+ },
+ })
+ return res.status(404).send({
+ message: "Please ensure that the site exists!",
+ })
+ }
+
+ // Step 2: Check that user exists.
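The reviewer validation in createReviewRequest above (Steps 2–3) reduces to a single rule: at least one reviewer must be requested, and every requested reviewer must already be a collaborator on the site with the Admin role. Restated as a standalone helper for clarity — this is illustrative only and not how the patch factors the check; the collaborator shape and role literal are simplified:

// Simplified collaborator shape; the real one is a User joined with SiteMember.
type CollaboratorLike = {
  email: string | null
  SiteMember: { role: string }
}

const validateReviewers = (
  reviewers: string[],
  collaborators: CollaboratorLike[],
  adminRole = "ADMIN" // stand-in for CollaboratorRoles.Admin
): { ok: boolean; admins: CollaboratorLike[] } => {
  if (reviewers.length === 0) return { ok: false, admins: [] }

  const requested = new Set(reviewers)
  const admins = collaborators.filter(
    (collaborator) =>
      collaborator.SiteMember.role === adminRole &&
      requested.has(collaborator.email || "")
  )

  // Every requested reviewer must have matched an admin collaborator.
  return { ok: admins.length === reviewers.length, admins }
}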
+ // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: "Insufficient permissions to view review request", + method: "listReviews", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Only collaborators of a site can view reviews!", + }) + } + + // Step 3: Fetch data and return + const reviews = await this.reviewRequestService.listReviewRequest( + userWithSiteSessionData, + site + ) + + return res.status(200).json({ + reviews, + }) + } + + markAllReviewRequestsAsViewed: RequestHandler< + { siteName: string }, + string | ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName } = req.params + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. + // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + const { userWithSiteSessionData } = res.locals + + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + return res.status(404).send({ + message: "User is not a collaborator of this site!", + }) + } + + // Step 3: Update all review requests for the site as viewed + await this.reviewRequestService.markAllReviewRequestsAsViewed( + userWithSiteSessionData, + site + ) + + return res.status(200).send() + } + + markReviewRequestAsViewed: RequestHandler< + { siteName: string; requestId: number }, + string | ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId: prNumber } = req.params + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. 
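A note on the handler typings used throughout this router (and in collaborators.ts and notifications.ts above): RequestHandler is imported from @root/types rather than from Express. Its definition is not part of this hunk; judging by how it is used, it is presumably a thin alias over the Express generic handler type, with the fifth parameter typing res.locals. A sketch along those lines follows, as an assumption rather than the project's actual definition:

import type { RequestHandler as ExpressRequestHandler } from "express-serve-static-core"

// Assumed shape: same five generic slots as Express's RequestHandler, with
// Locals used for the session data classes attached by the middleware.
export type RequestHandler<
  Params = Record<string, string>,
  ResBody = unknown,
  ReqBody = unknown,
  ReqQuery = unknown,
  Locals extends Record<string, any> = Record<string, any>
> = ExpressRequestHandler<Params, ResBody, ReqBody, ReqQuery, Locals>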
+ // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + const { userWithSiteSessionData } = res.locals + + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + return res.status(404).send({ + message: "User is not a collaborator of this site!", + }) + } + + // Step 3: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + prNumber + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "markReviewRequestAsViewed", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + prNumber, + }, + }) + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 4: Mark review request as viewed + await this.reviewRequestService.markReviewRequestAsViewed( + userWithSiteSessionData, + site, + possibleReviewRequest.id + ) + + return res.status(200).json() + } + + getReviewRequest: RequestHandler< + { siteName: string; requestId: number }, + { reviewRequest: ReviewRequestDto } | ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "getReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. 
+ // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: "Insufficient permissions to retrieve review request", + method: "getReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Only collaborators of a site can view reviews!", + }) + } + + const possibleReviewRequest = await this.reviewRequestService.getFullReviewRequest( + userWithSiteSessionData, + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "getReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(possibleReviewRequest.status).send({ + message: possibleReviewRequest.message, + }) + } + + return res.status(200).json({ reviewRequest: possibleReviewRequest }) + } + + updateReviewRequest: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + UpdateReviewRequestDto, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "updateReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "updateReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 3: Check that the user updating is the requestor + const { requestor } = possibleReviewRequest + if (requestor.email !== userWithSiteSessionData.email) { + logger.error({ + message: "Insufficient permissions to update review request", + method: "updateReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(403).json({ + message: "Only requestors can update the review request!", + }) + } + + // Step 4: Check that all new reviewers are admins of the site + const { reviewers } = req.body + const collaborators = await this.collaboratorsService.list(siteName) + const collaboratorMappings = Object.fromEntries( + reviewers.map((reviewer) => [reviewer, true]) + ) + const verifiedReviewers = collaborators.filter( + (collaborator) => + collaborator.SiteMember.role === CollaboratorRoles.Admin && + // NOTE: We check for existence of email on the user - since this + // is an identity feature, we assume that **all** users calling this endpoint + // 
will have a valid email (guaranteed by our modal) + collaborator.email && + !!collaboratorMappings[collaborator.email] && + // NOTE: Prevent the requestor from adding themselves as a reviewer + collaborator.email !== requestor.email + ) + + if (verifiedReviewers.length !== reviewers.length) { + return res.status(400).json({ + message: + "Please ensure that all requested reviewers are admins of the site!", + }) + } + + // Step 5: Update the rr with the appropriate details + await this.reviewRequestService.updateReviewRequest(possibleReviewRequest, { + reviewers: verifiedReviewers, + }) + + return res.status(200).send() + } + + mergeReviewRequest: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + never, + unknown, + { userSessionData: UserSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "mergeReviewRequest", + meta: { + userId: userSessionData.isomerUserId, + email: userSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. + // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: "Insufficient permissions to merge review request", + method: "mergeReviewRequest", + meta: { + userId: userSessionData.isomerUserId, + email: userSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Only collaborators of a site can view reviews!", + }) + } + + // Step 3: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "mergeReviewRequest", + meta: { + userId: userSessionData.isomerUserId, + email: userSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 4: Merge review request + // NOTE: We are not checking for existence of PR + // as the underlying Github API returns 404 if + // the requested review could not be found. 
+ await this.reviewRequestService.mergeReviewRequest(possibleReviewRequest) + + // Step 5: Clean up the review request view records + // The error is discarded as we are guaranteed to have a review request + await this.reviewRequestService.deleteAllReviewRequestViews(site, requestId) + + return res.status(200).send() + } + + approveReviewRequest: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "approveReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if ( + isIsomerError(possibleReviewRequest) || + // NOTE: Only allow approving review requests that are currently open + possibleReviewRequest.reviewStatus !== ReviewRequestStatus.Open + ) { + logger.error({ + message: "Invalid review request requested", + method: "approveReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Please ensure that the review request exists!", + }) + } + + // Step 3: Check if the user is a reviewer of the RR + const { reviewers } = possibleReviewRequest + const isReviewer = _.some( + reviewers, + (user) => + user.email === userWithSiteSessionData.email && + // NOTE: Check that the reviewer's email is not the requestor's email + // in order to prevent self approvals + user.email !== possibleReviewRequest.requestor.email + ) + + if (!isReviewer) { + logger.error({ + message: "Insufficient permissions to approve review request", + method: "approveReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(403).send({ + message: "Please ensure that you are a reviewer of the review request!", + }) + } + + // Step 4: Approve review request + // NOTE: We are not checking for existence of PR + // as the underlying Github API returns 404 if + // the requested review could not be found. 
+ await this.reviewRequestService.approveReviewRequest(possibleReviewRequest) + + // Step 6: Create notifications + const collaborators = await this.collaboratorsService.list(siteName) + await Promise.all( + collaborators.map(async (user: User & { SiteMember: SiteMember }) => { + // Don't send notification to self + if (user.id.toString() === userWithSiteSessionData.isomerUserId) return + await this.notificationsService.create({ + siteMember: user.SiteMember, + link: `/sites/${siteName}/review/${requestId}`, + notificationType: "request_approved", + notificationSourceUsername: userWithSiteSessionData.email, + }) + }) + ) + + return res.status(200).send() + } + + getComments: RequestHandler< + { siteName: string; requestId: number }, + CommentItem[] | ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + // Step 1: Check that the site exists + const site = await this.sitesService.getBySiteName(siteName) + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "getComments", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. + // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: "Insufficient permissions to retrieve review request comments", + method: "getComments", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: + "Only collaborators of a site can view review request comments!", + }) + } + + // Step 3: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "getComments", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 4: Retrieve comments + const comments = await this.reviewRequestService.getComments( + userWithSiteSessionData, + site, + requestId + ) + + return res.status(200).json(comments) + } + + createComment: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + { message: string }, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { siteName, requestId } = req.params + const { message } = req.body + const { userWithSiteSessionData } = res.locals + + // Step 1: Check that the site exists + const site = await this.sitesService.getBySiteName(siteName) + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "createComment", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. 
+ // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: "Insufficient permissions to retrieve review request comments", + method: "createComment", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: + "Only collaborators of a site can view review request comments!", + }) + } + + // Step 3: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "createComment", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 4: Create comment + await this.reviewRequestService.createComment( + userWithSiteSessionData, + requestId, + message + ) + + return res.status(200).send() + } + + markReviewRequestCommentsAsViewed: RequestHandler< + { siteName: string; requestId: number }, + string | ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Check that user exists. 
+ // Having session data is proof that this user exists + // as otherwise, they would be rejected by our middleware + const { userWithSiteSessionData } = res.locals + + // Check if they are a collaborator + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + return res.status(404).send({ + message: "User is not a collaborator of this site!", + }) + } + + // Step 3: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + return res.status(404).json({ message: possibleReviewRequest.message }) + } + + // Step 4: Update user's last viewed timestamp for the review request + await this.reviewRequestService.updateReviewRequestLastViewedAt( + userWithSiteSessionData, + site, + possibleReviewRequest + ) + + return res.status(200).send() + } + + closeReviewRequest: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "closeReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "closeReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res + .status(possibleReviewRequest.status) + .json({ message: possibleReviewRequest.message }) + } + + // Step 3: Check if the user is the requestor + const { requestor } = possibleReviewRequest + const isRequestor = requestor.email === userWithSiteSessionData.email + if (!isRequestor) { + logger.error({ + message: "Insufficient permissions to close review request", + method: "closeReviewRequest", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).json({ + message: "Only the requestor can close the Review Request!", + }) + } + + // Step 4: Close review request + // NOTE: We are not checking for existence of PR + // as the underlying Github API returns 404 if + // the requested review could not be found. 
+ await this.reviewRequestService.closeReviewRequest(possibleReviewRequest) + + // Step 5: Clean up the review request view records + // The error is discarded as we are guaranteed to have a review request + await this.reviewRequestService.deleteAllReviewRequestViews(site, requestId) + + // Step 7: Create notifications + const collaborators = await this.collaboratorsService.list(siteName) + await Promise.all( + collaborators.map(async (user: User & { SiteMember: SiteMember }) => { + // Don't send notification to self + if (user.id.toString() === userWithSiteSessionData.isomerUserId) return + await this.notificationsService.create({ + siteMember: user.SiteMember, + link: `/sites/${siteName}/review/${requestId}`, + notificationType: "request_cancelled", + notificationSourceUsername: userWithSiteSessionData.email, + }) + }) + ) + return res.status(200).send() + } + + deleteReviewRequestApproval: RequestHandler< + { siteName: string; requestId: number }, + ResponseErrorBody, + never, + unknown, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "deleteReviewRequestApproval", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "deleteReviewRequestApproval", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 3: Check if the user is a reviewer of the RR + const { reviewers } = possibleReviewRequest + const isReviewer = _.some( + reviewers, + (user) => user.email === userWithSiteSessionData.email + ) + + if (!isReviewer) { + logger.error({ + message: + "User with insufficient permissions attempted to delete approval", + method: "deleteReviewRequestApproval", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 4: Delete review request approval + await this.reviewRequestService.deleteReviewRequestApproval( + possibleReviewRequest + ) + return res.status(200).send() + } + + getBlob: RequestHandler< + { siteName: string; requestId: number }, + BlobDiffDto | ResponseErrorBody, + unknown, + { path: string }, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + // Step 1: Check that the site exists + const { siteName, requestId } = req.params + const { path } = req.query + const { userWithSiteSessionData } = res.locals + const site = await this.sitesService.getBySiteName(siteName) + + if (!site) { + logger.error({ + message: "Invalid site requested", + method: "getBlob", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return 
res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 2: Retrieve review request + const possibleReviewRequest = await this.reviewRequestService.getReviewRequest( + site, + requestId + ) + + if (isIsomerError(possibleReviewRequest)) { + logger.error({ + message: "Invalid review request requested", + method: "getBlob", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + requestId, + file: path, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // Step 3: Check if the user is a contributor of the site + const role = await this.collaboratorsService.getRole( + siteName, + userWithSiteSessionData.isomerUserId + ) + + if (!role) { + logger.error({ + message: + "User with insufficient permissions attempted to retrieve blob diff", + method: "getBlob", + meta: { + userId: userWithSiteSessionData.isomerUserId, + email: userWithSiteSessionData.email, + siteName, + }, + }) + return res.status(404).send({ + message: "Please ensure that the site exists!", + }) + } + + // NOTE: Currently, Isomer only allows comparisons between staging and production. + // This might change in the future and in that case, the `getBlob` method call below + // should have the corresponding ref (`master` or `staging`) changed. + const prodPromise = this.reviewRequestService.getBlob( + siteName, + path, + "master" + ) + const stagingPromise = this.reviewRequestService.getBlob( + siteName, + path, + "staging" + ) + + const data = await Promise.all([prodPromise, stagingPromise]) + + return res.status(200).json({ + oldValue: data[0], + newValue: data[1], + }) + } + + getRouter() { + const router = express.Router({ mergeParams: true }) + + router.get("/compare", attachReadRouteHandlerWrapper(this.compareDiff)) + router.post( + "/request", + attachWriteRouteHandlerWrapper(this.createReviewRequest) + ) + router.get("/summary", attachReadRouteHandlerWrapper(this.listReviews)) + router.post( + "/viewed", + attachWriteRouteHandlerWrapper(this.markAllReviewRequestsAsViewed) + ) + router.get( + "/:requestId", + attachReadRouteHandlerWrapper(this.getReviewRequest) + ) + router.post( + "/:requestId/viewed", + attachWriteRouteHandlerWrapper(this.markReviewRequestAsViewed) + ) + router.post( + "/:requestId/merge", + attachWriteRouteHandlerWrapper(this.mergeReviewRequest) + ) + router.post( + "/:requestId/approve", + attachReadRouteHandlerWrapper(this.approveReviewRequest) + ) + router.get( + "/:requestId/comments", + attachWriteRouteHandlerWrapper(this.getComments) + ) + router.post( + "/:requestId/comments", + attachWriteRouteHandlerWrapper(this.createComment) + ) + router.delete( + "/:requestId/approve", + attachReadRouteHandlerWrapper(this.deleteReviewRequestApproval) + ) + router.post( + "/:requestId/comments/viewedComments", + attachWriteRouteHandlerWrapper(this.markReviewRequestCommentsAsViewed) + ) + router.post( + "/:requestId", + attachWriteRouteHandlerWrapper(this.updateReviewRequest) + ) + router.delete( + "/:requestId", + attachReadRouteHandlerWrapper(this.closeReviewRequest) + ) + router.get("/:requestId/blob", attachReadRouteHandlerWrapper(this.getBlob)) + return router + } +} diff --git a/src/routes/v2/authenticated/sites.js b/src/routes/v2/authenticated/sites.js deleted file mode 100644 index 2df9601ec..000000000 --- a/src/routes/v2/authenticated/sites.js +++ /dev/null @@ -1,79 +0,0 @@ -const autoBind = require("auto-bind") -const express = require("express") - -// Import 
middleware -const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") - -class SitesRouter { - constructor({ sitesService }) { - this.sitesService = sitesService - // We need to bind all methods because we don't invoke them from the class directly - autoBind(this) - } - - async getSites(req, res) { - const { accessToken } = res.locals - const siteNames = await this.sitesService.getSites({ accessToken }) - return res.status(200).json({ siteNames }) - } - - async checkHasAccess(req, res) { - const { - params: { siteName }, - } = req - const { userId, accessToken } = res.locals - - await this.sitesService.checkHasAccess( - { - accessToken, - siteName, - }, - { userId } - ) - return res.status(200).send("OK") - } - - async getLastUpdated(req, res) { - const { - params: { siteName }, - } = req - const { accessToken } = res.locals - const lastUpdated = await this.sitesService.getLastUpdated({ - accessToken, - siteName, - }) - return res.status(200).json({ lastUpdated }) - } - - async getStagingUrl(req, res) { - const { - params: { siteName }, - } = req - const { accessToken } = res.locals - - const stagingUrl = await this.sitesService.getStagingUrl({ - accessToken, - siteName, - }) - return res.status(200).json({ stagingUrl }) - } - - getRouter() { - const router = express.Router({ mergeParams: true }) - - router.get("/", attachReadRouteHandlerWrapper(this.getSites)) - router.get("/:siteName", attachReadRouteHandlerWrapper(this.checkHasAccess)) - router.get( - "/:siteName/lastUpdated", - attachReadRouteHandlerWrapper(this.getLastUpdated) - ) - router.get( - "/:siteName/stagingUrl", - attachReadRouteHandlerWrapper(this.getStagingUrl) - ) - - return router - } -} - -module.exports = { SitesRouter } diff --git a/src/routes/v2/authenticated/sites.ts b/src/routes/v2/authenticated/sites.ts new file mode 100644 index 000000000..e03771b87 --- /dev/null +++ b/src/routes/v2/authenticated/sites.ts @@ -0,0 +1,144 @@ +import autoBind from "auto-bind" +import express from "express" + +import type { AuthorizationMiddleware } from "@middleware/authorization" +import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" + +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import type UserSessionData from "@root/classes/UserSessionData" +import { BaseIsomerError } from "@root/errors/BaseError" +import { attachSiteHandler } from "@root/middleware" +import type { RequestHandler } from "@root/types" +import type SitesService from "@services/identity/SitesService" + +type SitesRouterProps = { + sitesService: SitesService + authorizationMiddleware: AuthorizationMiddleware +} + +export class SitesRouter { + private readonly sitesService + + private readonly authorizationMiddleware + + constructor({ sitesService, authorizationMiddleware }: SitesRouterProps) { + this.sitesService = sitesService + this.authorizationMiddleware = authorizationMiddleware + // We need to bind all methods because we don't invoke them from the class directly + autoBind(this) + } + + getSites: RequestHandler< + never, + unknown, + never, + never, + { userSessionData: UserSessionData } + > = async (req, res) => { + const { userSessionData } = res.locals + const siteNames = await this.sitesService.getSites(userSessionData) + return res.status(200).json({ siteNames }) + } + + getLastUpdated: RequestHandler< + { siteName: string }, + unknown, + never, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + 
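// Editorial note, not in the original diff: unlike the deleted sites.js handler,
// no siteName is read from req.params here — UserWithSiteSessionData (attached
// upstream, presumably by attachSiteHandler in getRouter below) already carries
// the site context that sitesService.getLastUpdated needs.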
const lastUpdated = await this.sitesService.getLastUpdated( + userWithSiteSessionData + ) + return res.status(200).json({ lastUpdated }) + } + + getStagingUrl: RequestHandler< + { siteName: string }, + unknown, + never, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + const possibleStagingUrl = await this.sitesService.getStagingUrl( + userWithSiteSessionData + ) + + // Check for error and throw + if (possibleStagingUrl instanceof BaseIsomerError) { + return res.status(404).json({ message: possibleStagingUrl.message }) + } + return res.status(200).json({ stagingUrl: possibleStagingUrl }) + } + + getSiteUrl: RequestHandler< + { siteName: string }, + unknown, + never, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + const possibleSiteUrl = await this.sitesService.getSiteUrl( + userWithSiteSessionData + ) + + // Check for error and throw + if (possibleSiteUrl instanceof BaseIsomerError) { + return res.status(404).json({ message: possibleSiteUrl.message }) + } + return res.status(200).json({ siteUrl: possibleSiteUrl }) + } + + getSiteInfo: RequestHandler< + { siteName: string }, + unknown, + never, + never, + { userWithSiteSessionData: UserWithSiteSessionData } + > = async (req, res) => { + const { userWithSiteSessionData } = res.locals + + const possibleSiteInfo = await this.sitesService.getSiteInfo( + userWithSiteSessionData + ) + + // Check for error and throw + if (possibleSiteInfo instanceof BaseIsomerError) { + return res.status(400).json({ message: possibleSiteInfo.message }) + } + return res.status(200).json(possibleSiteInfo) + } + + getRouter() { + const router = express.Router({ mergeParams: true }) + + router.get("/", attachReadRouteHandlerWrapper(this.getSites)) + router.get( + "/:siteName/lastUpdated", + attachSiteHandler, + attachReadRouteHandlerWrapper(this.getLastUpdated) + ) + router.get( + "/:siteName/stagingUrl", + attachSiteHandler, + attachReadRouteHandlerWrapper(this.getStagingUrl) + ) + router.get( + "/:siteName/siteUrl", + attachSiteHandler, + attachReadRouteHandlerWrapper(this.getSiteUrl) + ) + router.get( + "/:siteName/info", + attachSiteHandler, + this.authorizationMiddleware.verifySiteMember, + attachReadRouteHandlerWrapper(this.getSiteInfo) + ) + + return router + } +} diff --git a/src/routes/v2/authenticated/users.ts b/src/routes/v2/authenticated/users.ts index 6ec49b223..2b1467566 100644 --- a/src/routes/v2/authenticated/users.ts +++ b/src/routes/v2/authenticated/users.ts @@ -8,6 +8,8 @@ import { BadRequestError } from "@errors/BadRequestError" import { attachReadRouteHandlerWrapper } from "@middleware/routeHandler" +import UserSessionData from "@classes/UserSessionData" + import { isError, RequestHandler } from "@root/types" import UsersService from "@services/identity/UsersService" @@ -61,15 +63,18 @@ export class UsersRouter { unknown, { email: string; otp: string }, never, - { userId: string } + { userSessionData: UserSessionData } > = async (req, res) => { const { email, otp } = req.body - const { userId } = res.locals - if (!this.usersService.verifyOtp(email, otp)) { + const { userSessionData } = res.locals + const userId = userSessionData.isomerUserId + + const isOtpValid = await this.usersService.verifyEmailOtp(email, otp) + if (!isOtpValid) { throw new BadRequestError("Invalid OTP") } - await this.usersService.updateUserByGitHubId(userId, { email }) + await 
this.usersService.updateUserByIsomerId(userId, { email }) return res.sendStatus(200) } @@ -92,15 +97,18 @@ export class UsersRouter { unknown, { mobile: string; otp: string }, never, - { userId: string } + { userSessionData: UserSessionData } > = async (req, res) => { const { mobile, otp } = req.body - const { userId } = res.locals - if (!this.usersService.verifyOtp(mobile, otp)) { + const { userSessionData } = res.locals + const userId = userSessionData.isomerUserId + + const isOtpValid = await this.usersService.verifyMobileOtp(mobile, otp) + if (!isOtpValid) { throw new BadRequestError("Invalid OTP") } - await this.usersService.updateUserByGitHubId(userId, { + await this.usersService.updateUserByIsomerId(userId, { contactNumber: mobile, }) return res.sendStatus(200) diff --git a/src/routes/v2/authenticatedSites/__tests__/CollectionPages.spec.js b/src/routes/v2/authenticatedSites/__tests__/CollectionPages.spec.js index c2f095ebc..c1d6b3030 100644 --- a/src/routes/v2/authenticatedSites/__tests__/CollectionPages.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/CollectionPages.spec.js @@ -4,6 +4,7 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { CollectionPagesRouter } = require("../collectionPages") @@ -69,13 +70,10 @@ describe("Collection Pages Router", () => { const siteName = "test-site" const collectionName = "collection" const subcollectionName = "subcollection" - const accessToken = undefined // Can't set request fields - will always be undefined const fileName = "test-file" const mockSha = "12345" const mockContent = "mock-content" - const reqDetails = { siteName, accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -111,7 +109,7 @@ describe("Collection Pages Router", () => { .send(pageDetails) .expect(200) expect(mockCollectionPageService.create).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -131,7 +129,7 @@ describe("Collection Pages Router", () => { .send(pageDetails) .expect(200) expect(mockSubcollectionPageService.create).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -156,7 +154,7 @@ describe("Collection Pages Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockCollectionPageService.read).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedControllerInput ) }) @@ -174,7 +172,7 @@ describe("Collection Pages Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockSubcollectionPageService.read).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedControllerInput ) }) @@ -217,7 +215,7 @@ describe("Collection Pages Router", () => { .send(updatePageDetails) .expect(200) expect(mockCollectionPageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -236,7 +234,7 @@ describe("Collection Pages Router", () => { .send(renamePageDetails) .expect(200) expect(mockCollectionPageService.rename).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -257,7 +255,7 @@ describe("Collection Pages Router", () => { .send(updatePageDetails) .expect(200) expect(mockSubcollectionPageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, 
expectedServiceInput ) }) @@ -279,7 +277,7 @@ describe("Collection Pages Router", () => { .send(renamePageDetails) .expect(200) expect(mockSubcollectionPageService.rename).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -308,7 +306,7 @@ describe("Collection Pages Router", () => { .send(pageDetails) .expect(200) expect(mockCollectionPageService.delete).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -327,7 +325,7 @@ describe("Collection Pages Router", () => { .send(pageDetails) .expect(200) expect(mockSubcollectionPageService.delete).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/Collections.spec.js b/src/routes/v2/authenticatedSites/__tests__/Collections.spec.js index 152e24a36..e4bf8782d 100644 --- a/src/routes/v2/authenticatedSites/__tests__/Collections.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/Collections.spec.js @@ -4,6 +4,10 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { + mockUserWithSiteSessionData, + mockGithubSessionData, +} = require("@fixtures/sessionData") const { CollectionsRouter } = require("../collections") @@ -93,14 +97,6 @@ describe("Collections Router", () => { const collectionName = "collection" const subcollectionName = "subcollection" - // Can't set request fields - will always be undefined - const accessToken = undefined - const currentCommitSha = undefined - const treeSha = undefined - - const reqDetails = { siteName, accessToken } - const additionalReqDetails = { ...reqDetails, currentCommitSha, treeSha } - beforeEach(() => { jest.clearAllMocks() }) @@ -126,7 +122,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(expectedResponse) expect( mockCollectionDirectoryService.listAllCollections - ).toHaveBeenCalledWith(reqDetails) + ).toHaveBeenCalledWith(mockUserWithSiteSessionData) }) }) @@ -160,7 +156,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(expectedResponse) expect( mockCollectionDirectoryService.listFiles - ).toHaveBeenCalledWith(reqDetails, { collectionName }) + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName }) }) it("returns all files in a subcollection", async () => { const expectedResponse = [ @@ -182,9 +178,13 @@ describe("Collections Router", () => { ) .expect(200) expect(resp.body).toStrictEqual(expectedResponse) - expect( - mockSubcollectionDirectoryService.listFiles - ).toHaveBeenCalledWith(reqDetails, { collectionName, subcollectionName }) + expect(mockSubcollectionDirectoryService.listFiles).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + { + collectionName, + subcollectionName, + } + ) }) }) @@ -205,7 +205,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual([]) expect( mockCollectionDirectoryService.createDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, objArray: undefined, }) @@ -235,7 +235,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(collectionDetails.items) expect( mockCollectionDirectoryService.createDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, objArray: collectionDetails.items, }) @@ -255,7 +255,7 @@ describe("Collections 
Router", () => { expect(resp.body).toStrictEqual([]) expect( mockSubcollectionDirectoryService.createDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, subcollectionName, objArray: undefined, @@ -286,7 +286,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(collectionDetails.items) expect( mockSubcollectionDirectoryService.createDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, subcollectionName, objArray: collectionDetails.items, @@ -311,10 +311,14 @@ describe("Collections Router", () => { .expect(200) expect( mockCollectionDirectoryService.renameDirectory - ).toHaveBeenCalledWith(reqDetails, { - collectionName, - newDirectoryName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + collectionName, + newDirectoryName, + } + ) }) it("accepts valid subcollection rename requests", async () => { @@ -326,11 +330,15 @@ describe("Collections Router", () => { .expect(200) expect( mockSubcollectionDirectoryService.renameDirectory - ).toHaveBeenCalledWith(reqDetails, { - collectionName, - subcollectionName, - newDirectoryName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + collectionName, + subcollectionName, + newDirectoryName, + } + ) }) }) @@ -341,9 +349,13 @@ describe("Collections Router", () => { .expect(200) expect( mockCollectionDirectoryService.deleteDirectory - ).toHaveBeenCalledWith(reqDetails, { - collectionName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + collectionName, + } + ) }) it("accepts valid subcollection delete requests", async () => { @@ -354,10 +366,14 @@ describe("Collections Router", () => { .expect(200) expect( mockSubcollectionDirectoryService.deleteDirectory - ).toHaveBeenCalledWith(reqDetails, { - collectionName, - subcollectionName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + collectionName, + subcollectionName, + } + ) }) }) @@ -392,7 +408,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(reorderDetails.items) expect( mockCollectionDirectoryService.reorderDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, objArray: reorderDetails.items, }) @@ -411,7 +427,7 @@ describe("Collections Router", () => { expect(resp.body).toStrictEqual(reorderDetails.items) expect( mockSubcollectionDirectoryService.reorderDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { collectionName, subcollectionName, objArray: reorderDetails.items, @@ -462,7 +478,7 @@ describe("Collections Router", () => { .send({ items, target: {} }) .expect(200) expect(mockCollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, objArray: items, @@ -476,7 +492,7 @@ describe("Collections Router", () => { .send({ items, target: { collectionName: targetCollectionName } }) .expect(200) expect(mockCollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, targetCollectionName, @@ -497,7 +513,7 @@ describe("Collections Router", () => { }) .expect(200) expect(mockCollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, targetCollectionName, 
@@ -515,7 +531,7 @@ describe("Collections Router", () => { .send({ items, target: {} }) .expect(200) expect(mockSubcollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, subcollectionName, @@ -532,7 +548,7 @@ describe("Collections Router", () => { .send({ items, target: { collectionName: targetCollectionName } }) .expect(200) expect(mockSubcollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, subcollectionName, @@ -556,7 +572,7 @@ describe("Collections Router", () => { }) .expect(200) expect(mockSubcollectionDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { collectionName, subcollectionName, diff --git a/src/routes/v2/authenticatedSites/__tests__/ContactUs.spec.js b/src/routes/v2/authenticatedSites/__tests__/ContactUs.spec.js index 5ee9444a7..8e0d300eb 100644 --- a/src/routes/v2/authenticatedSites/__tests__/ContactUs.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/ContactUs.spec.js @@ -6,6 +6,7 @@ const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") const { contactUsContent, contactUsSha } = require("@fixtures/contactUs") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { ContactUsRouter } = require("../contactUs") @@ -32,9 +33,6 @@ describe("ContactUs Router", () => { const app = generateRouter(subrouter) const siteName = "test-site" - const accessToken = undefined // Can't set request fields - will always be undefined - - const reqDetails = { siteName, accessToken } beforeEach(() => { jest.clearAllMocks() @@ -51,7 +49,9 @@ describe("ContactUs Router", () => { const resp = await request(app).get(`/${siteName}/contactUs`).expect(200) expect(resp.body).toStrictEqual(expectedResponse) - expect(mockContactUsPageService.read).toHaveBeenCalledWith(reqDetails) + expect(mockContactUsPageService.read).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) }) }) @@ -96,7 +96,7 @@ describe("ContactUs Router", () => { .expect(200) expect(mockContactUsPageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/Homepage.spec.js b/src/routes/v2/authenticatedSites/__tests__/Homepage.spec.js index 149de0172..c20e5f1f7 100644 --- a/src/routes/v2/authenticatedSites/__tests__/Homepage.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/Homepage.spec.js @@ -8,6 +8,7 @@ const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") const { homepageContent } = require("@fixtures/homepage") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { HomepageRouter } = require("../homepage") @@ -36,11 +37,8 @@ describe("Homepage Router", () => { const app = generateRouter(subrouter) const siteName = "test-site" - const accessToken = undefined // Can't set request fields - will always be undefined const mockSha = "12345" - const reqDetails = { siteName, accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -56,7 +54,9 @@ describe("Homepage Router", () => { const resp = await request(app).get(`/${siteName}/homepage`).expect(200) expect(resp.body).toStrictEqual(expectedResponse) - expect(mockHomepagePageService.read).toHaveBeenCalledWith(reqDetails) + expect(mockHomepagePageService.read).toHaveBeenCalledWith( + 
mockUserWithSiteSessionData + ) }) it("returns appropriate failure on read failure", async () => { @@ -65,7 +65,9 @@ describe("Homepage Router", () => { ) await request(app).get(`/${siteName}/homepage`).expect(404) - expect(mockHomepagePageService.read).toHaveBeenCalledWith(reqDetails) + expect(mockHomepagePageService.read).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) }) }) @@ -111,7 +113,7 @@ describe("Homepage Router", () => { .expect(200) expect(mockHomepagePageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -132,7 +134,7 @@ describe("Homepage Router", () => { .expect(200) expect(mockHomepagePageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/MediaCategories.spec.js b/src/routes/v2/authenticatedSites/__tests__/MediaCategories.spec.js index 2b10f21b2..c92d1607f 100644 --- a/src/routes/v2/authenticatedSites/__tests__/MediaCategories.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/MediaCategories.spec.js @@ -4,6 +4,10 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { + mockUserWithSiteSessionData, + mockGithubSessionData, +} = require("@fixtures/sessionData") const { MediaCategoriesRouter } = require("../mediaCategories") @@ -54,9 +58,6 @@ describe("Media Categories Router", () => { const currentCommitSha = undefined const treeSha = undefined - const reqDetails = { siteName, accessToken } - const additionalReqDetails = { ...reqDetails, currentCommitSha, treeSha } - beforeEach(() => { jest.clearAllMocks() }) @@ -88,7 +89,7 @@ describe("Media Categories Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockMediaDirectoryService.listFiles).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { directoryName, } @@ -113,10 +114,14 @@ describe("Media Categories Router", () => { expect(resp.body).toStrictEqual({}) expect( mockMediaDirectoryService.createMediaDirectory - ).toHaveBeenCalledWith(reqDetails, { - directoryName, - objArray: undefined, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + directoryName, + objArray: undefined, + } + ) }) it("accepts valid category create requests with files and returns the details of the category created", async () => { mockMediaDirectoryService.createMediaDirectory.mockResolvedValueOnce({}) @@ -140,10 +145,14 @@ describe("Media Categories Router", () => { expect(resp.body).toStrictEqual({}) expect( mockMediaDirectoryService.createMediaDirectory - ).toHaveBeenCalledWith(reqDetails, { - directoryName, - objArray: mediaDetails.items, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + directoryName, + objArray: mediaDetails.items, + } + ) }) }) @@ -164,10 +173,14 @@ describe("Media Categories Router", () => { .expect(200) expect( mockMediaDirectoryService.renameMediaDirectory - ).toHaveBeenCalledWith(reqDetails, { - directoryName, - newDirectoryName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + directoryName, + newDirectoryName, + } + ) }) }) @@ -178,9 +191,13 @@ describe("Media Categories Router", () => { .expect(200) expect( mockMediaDirectoryService.deleteMediaDirectory - ).toHaveBeenCalledWith(additionalReqDetails, { - directoryName, - }) + 
).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + directoryName, + } + ) }) }) @@ -229,7 +246,8 @@ describe("Media Categories Router", () => { }) .expect(200) expect(mockMediaDirectoryService.moveMediaFiles).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, + mockGithubSessionData, { directoryName, targetDirectoryName: targetMediaCategory, diff --git a/src/routes/v2/authenticatedSites/__tests__/MediaFiles.spec.js b/src/routes/v2/authenticatedSites/__tests__/MediaFiles.spec.js index 487636dac..c010c210e 100644 --- a/src/routes/v2/authenticatedSites/__tests__/MediaFiles.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/MediaFiles.spec.js @@ -4,6 +4,10 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { + mockUserWithSiteSessionData, + mockGithubSessionData, +} = require("@fixtures/sessionData") const { MediaFilesRouter } = require("../mediaFiles") @@ -43,13 +47,10 @@ describe("Media Files Router", () => { const siteName = "test-site" const directoryName = "imageDir" - const accessToken = undefined // Can't set request fields - will always be undefined const fileName = "test-file" const mockSha = "12345" const mockContent = "mock-content" - const reqDetails = { siteName, accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -78,7 +79,7 @@ describe("Media Files Router", () => { .send(pageDetails) .expect(200) expect(mockMediaFileService.create).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -102,7 +103,7 @@ describe("Media Files Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockMediaFileService.read).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -138,7 +139,7 @@ describe("Media Files Router", () => { .send(updatePageDetails) .expect(200) expect(mockMediaFileService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -156,7 +157,8 @@ describe("Media Files Router", () => { .send(renamePageDetails) .expect(200) expect(mockMediaFileService.rename).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, + mockGithubSessionData, expectedServiceInput ) }) @@ -185,7 +187,7 @@ describe("Media Files Router", () => { .send(pageDetails) .expect(200) expect(mockMediaFileService.delete).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/Navigation.spec.js b/src/routes/v2/authenticatedSites/__tests__/Navigation.spec.js index 2ea717338..cfdfdaaf0 100644 --- a/src/routes/v2/authenticatedSites/__tests__/Navigation.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/Navigation.spec.js @@ -6,6 +6,7 @@ const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") const { navigationContent, navigationSha } = require("@fixtures/navigation") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { NavigationRouter } = require("../navigation") @@ -33,9 +34,6 @@ describe("Navigation Router", () => { const app = generateRouter(subrouter) const siteName = "test-site" - const accessToken = undefined // Can't set request fields - will always be undefined - - const reqDetails = { siteName, accessToken } beforeEach(() => { 
jest.clearAllMocks() @@ -52,7 +50,9 @@ describe("Navigation Router", () => { const resp = await request(app).get(`/${siteName}/navigation`).expect(200) expect(resp.body).toStrictEqual(expectedResponse) - expect(mockNavigationYmlService.read).toHaveBeenCalledWith(reqDetails) + expect(mockNavigationYmlService.read).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) }) }) @@ -103,7 +103,7 @@ describe("Navigation Router", () => { .expect(200) expect(mockNavigationYmlService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/ResourceCategories.spec.js b/src/routes/v2/authenticatedSites/__tests__/ResourceCategories.spec.js index fd7532e78..7067186ea 100644 --- a/src/routes/v2/authenticatedSites/__tests__/ResourceCategories.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/ResourceCategories.spec.js @@ -4,6 +4,10 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { + mockUserWithSiteSessionData, + mockGithubSessionData, +} = require("@fixtures/sessionData") const { ResourceCategoriesRouter } = require("../resourceCategories") @@ -50,14 +54,6 @@ describe("Resource Categories Router", () => { const resourceRoomName = "resource-room" const resourceCategoryName = "resource-category" - // Can't set request fields - will always be undefined - const accessToken = undefined - const currentCommitSha = undefined - const treeSha = undefined - - const reqDetails = { siteName, accessToken } - const additionalReqDetails = { ...reqDetails, currentCommitSha, treeSha } - beforeEach(() => { jest.clearAllMocks() }) @@ -88,7 +84,7 @@ describe("Resource Categories Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockResourceDirectoryService.listFiles).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { resourceRoomName, resourceCategoryName, @@ -119,7 +115,7 @@ describe("Resource Categories Router", () => { expect(resp.body).toStrictEqual({}) expect( mockResourceDirectoryService.createResourceDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { resourceRoomName, resourceCategoryName, }) @@ -147,11 +143,15 @@ describe("Resource Categories Router", () => { .expect(200) expect( mockResourceDirectoryService.renameResourceDirectory - ).toHaveBeenCalledWith(reqDetails, { - resourceRoomName, - resourceCategoryName, - newDirectoryName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + resourceRoomName, + resourceCategoryName, + newDirectoryName, + } + ) }) }) @@ -164,10 +164,14 @@ describe("Resource Categories Router", () => { .expect(200) expect( mockResourceDirectoryService.deleteResourceDirectory - ).toHaveBeenCalledWith(reqDetails, { - resourceRoomName, - resourceCategoryName, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + resourceRoomName, + resourceCategoryName, + } + ) }) }) @@ -225,12 +229,16 @@ describe("Resource Categories Router", () => { .expect(200) expect( mockResourceDirectoryService.moveResourcePages - ).toHaveBeenCalledWith(reqDetails, { - resourceRoomName, - resourceCategoryName, - targetResourceCategory, - objArray: items, - }) + ).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockGithubSessionData, + { + resourceRoomName, + resourceCategoryName, + 
targetResourceCategory, + objArray: items, + } + ) }) }) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/ResourcePages.spec.js b/src/routes/v2/authenticatedSites/__tests__/ResourcePages.spec.js index 0cd3afd44..b6a95a37a 100644 --- a/src/routes/v2/authenticatedSites/__tests__/ResourcePages.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/ResourcePages.spec.js @@ -4,6 +4,7 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { ResourcePagesRouter } = require("../resourcePages") @@ -44,13 +45,10 @@ describe("Resource Pages Router", () => { const siteName = "test-site" const resourceRoomName = "resource-room" const resourceCategoryName = "resource-category" - const accessToken = undefined // Can't set request fields - will always be undefined const fileName = "test-file" const mockSha = "12345" const mockContent = "mock-content" - const reqDetails = { siteName, accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -92,7 +90,7 @@ describe("Resource Pages Router", () => { .send(pageDetails) .expect(200) expect(mockResourcePageService.create).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -117,7 +115,7 @@ describe("Resource Pages Router", () => { .expect(200) expect(resp.body).toStrictEqual(expectedResponse) expect(mockResourcePageService.read).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -166,7 +164,7 @@ describe("Resource Pages Router", () => { .send(updatePageDetails) .expect(200) expect(mockResourcePageService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -188,7 +186,7 @@ describe("Resource Pages Router", () => { .send(renamePageDetails) .expect(200) expect(mockResourcePageService.rename).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -222,7 +220,7 @@ describe("Resource Pages Router", () => { .send(pageDetails) .expect(200) expect(mockResourcePageService.delete).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/ResourceRoom.spec.js b/src/routes/v2/authenticatedSites/__tests__/ResourceRoom.spec.js index 5fe4bf0f2..baa2c8c61 100644 --- a/src/routes/v2/authenticatedSites/__tests__/ResourceRoom.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/ResourceRoom.spec.js @@ -4,6 +4,7 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { ResourceRoomRouter } = require("../resourceRoom") @@ -49,14 +50,6 @@ describe("Resource Room Router", () => { const siteName = "test-site" const resourceRoomName = "resource-room" - // Can't set request fields - will always be undefined - const accessToken = undefined - const currentCommitSha = undefined - const treeSha = undefined - - const reqDetails = { siteName, accessToken } - const additionalReqDetails = { ...reqDetails, currentCommitSha, treeSha } - beforeEach(() => { jest.clearAllMocks() }) @@ -82,7 +75,7 @@ describe("Resource Room Router", () => { expect(resp.body).toStrictEqual(expectedResponse) expect( 
mockResourceRoomDirectoryService.listAllResourceCategories - ).toHaveBeenCalledWith(reqDetails, { resourceRoomName }) + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { resourceRoomName }) }) }) @@ -97,7 +90,7 @@ describe("Resource Room Router", () => { expect(resp.body).toStrictEqual({ resourceRoomName }) expect( mockResourceRoomDirectoryService.getResourceRoomDirectoryName - ).toHaveBeenCalledWith(reqDetails) + ).toHaveBeenCalledWith(mockUserWithSiteSessionData) }) }) @@ -120,7 +113,7 @@ describe("Resource Room Router", () => { expect(resp.body).toStrictEqual({}) expect( mockResourceRoomDirectoryService.createResourceRoomDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { resourceRoomName, }) }) @@ -143,7 +136,7 @@ describe("Resource Room Router", () => { .expect(200) expect( mockResourceRoomDirectoryService.renameResourceRoomDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { resourceRoomName, newDirectoryName, }) @@ -157,7 +150,7 @@ describe("Resource Room Router", () => { .expect(200) expect( mockResourceRoomDirectoryService.deleteResourceRoomDirectory - ).toHaveBeenCalledWith(reqDetails, { + ).toHaveBeenCalledWith(mockUserWithSiteSessionData, { resourceRoomName, }) }) diff --git a/src/routes/v2/authenticatedSites/__tests__/UnlinkedPages.spec.js b/src/routes/v2/authenticatedSites/__tests__/UnlinkedPages.spec.js index 97b25a496..352a3252b 100644 --- a/src/routes/v2/authenticatedSites/__tests__/UnlinkedPages.spec.js +++ b/src/routes/v2/authenticatedSites/__tests__/UnlinkedPages.spec.js @@ -4,6 +4,7 @@ const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { UnlinkedPagesRouter } = require("../unlinkedPages") @@ -57,13 +58,10 @@ describe("Unlinked Pages Router", () => { const app = generateRouter(subrouter) const siteName = "test-site" - const accessToken = undefined // Can't set request fields - will always be undefined const fileName = "test-file" const mockSha = "12345" const mockContent = "mock-content" - const reqDetails = { siteName, accessToken } - beforeEach(() => { jest.clearAllMocks() }) @@ -97,7 +95,7 @@ describe("Unlinked Pages Router", () => { expect(resp.body).toStrictEqual(listPageResp) expect( mockUnlinkedPagesDirectoryService.listAllUnlinkedPages - ).toHaveBeenCalledWith(reqDetails) + ).toHaveBeenCalledWith(mockUserWithSiteSessionData) }) }) @@ -128,7 +126,7 @@ describe("Unlinked Pages Router", () => { .send(createPageDetails) .expect(200) expect(mockService.create).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -146,7 +144,7 @@ describe("Unlinked Pages Router", () => { } await request(app).get(`/${siteName}/pages/pages/${fileName}`).expect(200) expect(mockService.read).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -187,7 +185,7 @@ describe("Unlinked Pages Router", () => { .send(updatePageDetails) .expect(200) expect(mockService.update).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -205,7 +203,7 @@ describe("Unlinked Pages Router", () => { .send(renamePageDetails) .expect(200) expect(mockService.rename).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -233,7 +231,7 @@ 
describe("Unlinked Pages Router", () => { .send(deletePageDetails) .expect(200) expect(mockService.delete).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedServiceInput ) }) @@ -285,7 +283,7 @@ describe("Unlinked Pages Router", () => { .send(expectedRequestInput) .expect(200) expect(mockUnlinkedPagesDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { targetCollectionName: collectionName, objArray: items, @@ -302,7 +300,7 @@ describe("Unlinked Pages Router", () => { }) .expect(200) expect(mockUnlinkedPagesDirectoryService.movePages).toHaveBeenCalledWith( - reqDetails, + mockUserWithSiteSessionData, { targetCollectionName: collectionName, targetSubcollectionName: subCollectionName, diff --git a/src/routes/v2/authenticatedSites/collectionPages.js b/src/routes/v2/authenticatedSites/collectionPages.js index 5eaf4a61e..7487b6e0f 100644 --- a/src/routes/v2/authenticatedSites/collectionPages.js +++ b/src/routes/v2/authenticatedSites/collectionPages.js @@ -24,66 +24,77 @@ class CollectionPagesRouter { } // Create new page in collection - async createCollectionPage(req, res) { - const { accessToken } = res.locals + async createCollectionPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params const { error } = CreatePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { content: { frontMatter, pageBody }, newFileName, } = req.body - const reqDetails = { siteName, accessToken } let createResp if (subcollectionName) { - createResp = await this.subcollectionPageService.create(reqDetails, { - fileName: newFileName, - collectionName, - content: pageBody, - frontMatter, - subcollectionName, - }) + createResp = await this.subcollectionPageService.create( + userWithSiteSessionData, + { + fileName: newFileName, + collectionName, + content: pageBody, + frontMatter, + subcollectionName, + } + ) } else { - createResp = await this.collectionPageService.create(reqDetails, { - fileName: newFileName, - collectionName, - content: pageBody, - frontMatter, - }) + createResp = await this.collectionPageService.create( + userWithSiteSessionData, + { + fileName: newFileName, + collectionName, + content: pageBody, + frontMatter, + } + ) } - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Read page in collection async readCollectionPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, pageName, collectionName, subcollectionName } = req.params + const { pageName, collectionName, subcollectionName } = req.params - const reqDetails = { siteName, accessToken } let readResp if (subcollectionName) { - readResp = await this.subcollectionPageService.read(reqDetails, { - fileName: pageName, - collectionName, - subcollectionName, - }) + readResp = await this.subcollectionPageService.read( + userWithSiteSessionData, + { + fileName: pageName, + collectionName, + subcollectionName, + } + ) } else { - readResp = await this.collectionPageService.read(reqDetails, { - fileName: pageName, - collectionName, - }) + readResp = await this.collectionPageService.read( + userWithSiteSessionData, + { + fileName: pageName, + collectionName, + } + ) } return res.status(200).json(readResp) } // Update page in collection - async updateCollectionPage(req, res) { - const { accessToken } = 
res.locals + async updateCollectionPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, pageName, collectionName, subcollectionName } = req.params + const { pageName, collectionName, subcollectionName } = req.params const { error } = UpdatePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -91,79 +102,90 @@ class CollectionPagesRouter { sha, newFileName, } = req.body - const reqDetails = { siteName, accessToken } let updateResp if (subcollectionName) { if (newFileName) { - updateResp = await this.subcollectionPageService.rename(reqDetails, { - oldFileName: pageName, - newFileName, - collectionName, - subcollectionName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.subcollectionPageService.rename( + userWithSiteSessionData, + { + oldFileName: pageName, + newFileName, + collectionName, + subcollectionName, + content: pageBody, + frontMatter, + sha, + } + ) } else { - updateResp = await this.subcollectionPageService.update(reqDetails, { - fileName: pageName, - collectionName, - subcollectionName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.subcollectionPageService.update( + userWithSiteSessionData, + { + fileName: pageName, + collectionName, + subcollectionName, + content: pageBody, + frontMatter, + sha, + } + ) } } else { /* eslint-disable no-lonely-if */ if (newFileName) { - updateResp = await this.collectionPageService.rename(reqDetails, { - oldFileName: pageName, - newFileName, - collectionName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.collectionPageService.rename( + userWithSiteSessionData, + { + oldFileName: pageName, + newFileName, + collectionName, + content: pageBody, + frontMatter, + sha, + } + ) } else { - updateResp = await this.collectionPageService.update(reqDetails, { - fileName: pageName, - collectionName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.collectionPageService.update( + userWithSiteSessionData, + { + fileName: pageName, + collectionName, + content: pageBody, + frontMatter, + sha, + } + ) } } - /* eslint-enable no-lonely-if */ - return res.status(200).json(updateResp) + res.status(200).json(updateResp) + return next() } // Delete page in collection - async deleteCollectionPage(req, res) { - const { accessToken } = res.locals + async deleteCollectionPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, pageName, collectionName, subcollectionName } = req.params + const { pageName, collectionName, subcollectionName } = req.params const { error } = DeletePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { sha } = req.body - const reqDetails = { siteName, accessToken } if (subcollectionName) { - await this.subcollectionPageService.delete(reqDetails, { + await this.subcollectionPageService.delete(userWithSiteSessionData, { fileName: pageName, collectionName, subcollectionName, sha, }) } else { - await this.collectionPageService.delete(reqDetails, { + await this.collectionPageService.delete(userWithSiteSessionData, { fileName: pageName, collectionName, sha, }) } - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/collections.js b/src/routes/v2/authenticatedSites/collections.js index 48a702bc8..e9a84f5c6 100644 --- a/src/routes/v2/authenticatedSites/collections.js +++ 
b/src/routes/v2/authenticatedSites/collections.js @@ -26,42 +26,42 @@ class CollectionsRouter { // List all collections async listAllCollections(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params - const listResp = await this.collectionDirectoryService.listAllCollections({ - siteName, - accessToken, - }) + const listResp = await this.collectionDirectoryService.listAllCollections( + userWithSiteSessionData + ) return res.status(200).json(listResp) } // List files in a collection/subcollection async listCollectionDirectoryFiles(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params let listResp if (subcollectionName) { listResp = await this.subcollectionDirectoryService.listFiles( - { siteName, accessToken }, + userWithSiteSessionData, { collectionName, subcollectionName } ) } else { listResp = await this.collectionDirectoryService.listFiles( - { siteName, accessToken }, - { collectionName } + userWithSiteSessionData, + { + collectionName, + } ) } return res.status(200).json(listResp) } // Create new collection/subcollection - async createCollectionDirectory(req, res) { - const { accessToken } = res.locals + async createCollectionDirectory(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, collectionName } = req.params + const { collectionName } = req.params const { error } = CreateDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName, items } = req.body @@ -69,7 +69,7 @@ class CollectionsRouter { if (collectionName) { // Creating subcollection createResp = await this.subcollectionDirectoryService.createDirectory( - { siteName, accessToken }, + userWithSiteSessionData, { collectionName, subcollectionName: newDirectoryName, @@ -79,7 +79,7 @@ class CollectionsRouter { } else { // Creating collection createResp = await this.collectionDirectoryService.createDirectory( - { siteName, accessToken }, + userWithSiteSessionData, { collectionName: newDirectoryName, objArray: items, @@ -87,20 +87,22 @@ class CollectionsRouter { ) } - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Rename collection/subcollection - async renameCollectionDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async renameCollectionDirectory(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params const { error } = RenameDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName } = req.body if (subcollectionName) { await this.subcollectionDirectoryService.renameDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { collectionName, subcollectionName, @@ -109,7 +111,8 @@ class CollectionsRouter { ) } else { await this.collectionDirectoryService.renameDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { collectionName, newDirectoryName, @@ -117,17 +120,19 @@ class CollectionsRouter { ) } - return res.status(200).send("OK") + res.status(200).send("OK") + return 
next() } // Delete collection/subcollection - async deleteCollectionDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async deleteCollectionDirectory(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params if (subcollectionName) { await this.subcollectionDirectoryService.deleteDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { collectionName, subcollectionName, @@ -135,27 +140,29 @@ class CollectionsRouter { ) } else { await this.collectionDirectoryService.deleteDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { collectionName, } ) } - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } // Reorder collection/subcollection - async reorderCollectionDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async reorderCollectionDirectory(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params const { error } = ReorderDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { items } = req.body let reorderResp if (subcollectionName) { reorderResp = await this.subcollectionDirectoryService.reorderDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, { collectionName, subcollectionName, @@ -164,21 +171,22 @@ class CollectionsRouter { ) } else { reorderResp = await this.collectionDirectoryService.reorderDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, { collectionName, objArray: items, } ) } - return res.status(200).json(reorderResp) + res.status(200).json(reorderResp) + return next() } // Move collection/subcollection pages - async moveCollectionDirectoryPages(req, res) { - const { accessToken } = res.locals + async moveCollectionDirectoryPages(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, collectionName, subcollectionName } = req.params + const { collectionName, subcollectionName } = req.params const { error } = MoveDirectoryPagesRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -190,7 +198,7 @@ class CollectionsRouter { } = req.body if (subcollectionName) { await this.subcollectionDirectoryService.movePages( - { siteName, accessToken }, + userWithSiteSessionData, { collectionName, subcollectionName, @@ -200,17 +208,15 @@ class CollectionsRouter { } ) } else { - await this.collectionDirectoryService.movePages( - { siteName, accessToken }, - { - collectionName, - targetCollectionName, - targetSubcollectionName, - objArray: items, - } - ) + await this.collectionDirectoryService.movePages(userWithSiteSessionData, { + collectionName, + targetCollectionName, + targetSubcollectionName, + objArray: items, + }) } - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/contactUs.js b/src/routes/v2/authenticatedSites/contactUs.js index e1f8e42cf..b146aa1f9 100644 --- a/src/routes/v2/authenticatedSites/contactUs.js +++ b/src/routes/v2/authenticatedSites/contactUs.js @@ -20,22 +20,19 @@ class 
ContactUsRouter {
   // Read contactUs file
   async readContactUs(req, res) {
-    const { siteName } = req.params
-    const { accessToken } = res.locals
+    const { userWithSiteSessionData } = res.locals
 
-    const readResp = await this.contactUsPageService.read({
-      siteName,
-      accessToken,
-    })
+    const readResp = await this.contactUsPageService.read(
+      userWithSiteSessionData
+    )
 
     return res.status(200).json(readResp)
   }
 
   // Update contactUs index file
-  async updateContactUs(req, res) {
-    const { accessToken } = res.locals
+  async updateContactUs(req, res, next) {
+    const { userWithSiteSessionData } = res.locals
 
-    const { siteName } = req.params
     const { error } = UpdateContactUsSchema.validate(req.body)
     if (error) throw new BadRequestError(error.message)
     const {
@@ -44,11 +41,12 @@ class ContactUsRouter {
     } = req.body
 
     const updatedContactUsPage = await this.contactUsPageService.update(
-      { siteName, accessToken },
+      userWithSiteSessionData,
       { content: pageBody, frontMatter, sha }
     )
 
-    return res.status(200).json(updatedContactUsPage)
+    res.status(200).json(updatedContactUsPage)
+    return next()
   }
 
   getRouter() {
diff --git a/src/routes/v2/authenticatedSites/homepage.js b/src/routes/v2/authenticatedSites/homepage.js
index 99c737db5..003c6bca3 100644
--- a/src/routes/v2/authenticatedSites/homepage.js
+++ b/src/routes/v2/authenticatedSites/homepage.js
@@ -20,23 +20,19 @@ class HomepageRouter {
   // Read homepage index file
   async readHomepage(req, res) {
-    const { accessToken } = res.locals
+    const { userWithSiteSessionData } = res.locals
 
-    const { siteName } = req.params
-
-    const readResp = await this.homepagePageService.read({
-      siteName,
-      accessToken,
-    })
+    const readResp = await this.homepagePageService.read(
+      userWithSiteSessionData
+    )
 
     return res.status(200).json(readResp)
   }
 
   // Update homepage index file
-  async updateHomepage(req, res) {
-    const { accessToken } = res.locals
+  async updateHomepage(req, res, next) {
+    const { userWithSiteSessionData } = res.locals
 
-    const { siteName } = req.params
     const { error } = UpdateHomepageSchema.validate(req.body, {
       allowUnknown: true,
     })
@@ -47,11 +43,16 @@ class HomepageRouter {
     } = req.body
 
     const updatedHomepage = await this.homepagePageService.update(
-      { siteName, accessToken },
-      { content: pageBody, frontMatter, sha }
+      userWithSiteSessionData,
+      {
+        content: pageBody,
+        frontMatter,
+        sha,
+      }
     )
 
-    return res.status(200).json(updatedHomepage)
+    res.status(200).json(updatedHomepage)
+    return next()
   }
 
   getRouter() {
diff --git a/src/routes/v2/authenticatedSites/index.js b/src/routes/v2/authenticatedSites/index.js
index fa1698b46..0add03c75 100644
--- a/src/routes/v2/authenticatedSites/index.js
+++ b/src/routes/v2/authenticatedSites/index.js
@@ -1,3 +1,5 @@
+import { attachSiteHandler } from "@root/middleware"
+
 const express = require("express")
 
 const {
@@ -82,10 +84,13 @@ const {
 const { MoverService } = require("@services/moverServices/MoverService")
 
 const getAuthenticatedSitesSubrouter = ({
-  authMiddleware,
+  authenticationMiddleware,
+  authorizationMiddleware,
   gitHubService,
   configYmlService,
   apiLogger,
+  notificationsService,
+  notificationOnEditHandler,
 }) => {
   const collectionYmlService = new CollectionYmlService({ gitHubService })
   const homepagePageService = new HomepagePageService({ gitHubService })
@@ -185,11 +190,12 @@ const getAuthenticatedSitesSubrouter = ({
 
   const authenticatedSitesSubrouter = express.Router({ mergeParams: true })
 
-  authenticatedSitesSubrouter.use(authMiddleware.verifyJwt)
-  authenticatedSitesSubrouter.use(authMiddleware.useSiteAccessTokenIfAvailable)
+  authenticatedSitesSubrouter.use(authenticationMiddleware.verifyAccess)
+  authenticatedSitesSubrouter.use(attachSiteHandler)
   // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username
   // which is only available after verifying that the jwt is valid
   authenticatedSitesSubrouter.use(apiLogger)
+  authenticatedSitesSubrouter.use(authorizationMiddleware.verifySiteMember)
 
   authenticatedSitesSubrouter.use(
     "/collections/:collectionName",
@@ -221,6 +227,7 @@ const getAuthenticatedSitesSubrouter = ({
   authenticatedSitesSubrouter.use("/contactUs", contactUsV2Router.getRouter())
   authenticatedSitesSubrouter.use("/homepage", homepageV2Router.getRouter())
   authenticatedSitesSubrouter.use("/settings", settingsV2Router.getRouter())
+  authenticatedSitesSubrouter.use(notificationOnEditHandler.createNotification)
 
   return authenticatedSitesSubrouter
 }
diff --git a/src/routes/v2/authenticatedSites/mediaCategories.js b/src/routes/v2/authenticatedSites/mediaCategories.js
index af5090687..a64dcb86c 100644
--- a/src/routes/v2/authenticatedSites/mediaCategories.js
+++ b/src/routes/v2/authenticatedSites/mediaCategories.js
@@ -24,73 +24,80 @@ class MediaCategoriesRouter {
   // List files in a resource category
   async listMediaDirectoryFiles(req, res) {
-    const { accessToken } = res.locals
+    const { userWithSiteSessionData } = res.locals
 
-    const { siteName, directoryName } = req.params
+    const { directoryName } = req.params
     const listResp = await this.mediaDirectoryService.listFiles(
-      { siteName, accessToken },
-      { directoryName }
+      userWithSiteSessionData,
+      {
+        directoryName,
+      }
     )
 
     return res.status(200).json(listResp)
   }
 
   // Create new media directory
-  async createMediaDirectory(req, res) {
-    const { accessToken, currentCommitSha, treeSha } = res.locals
+  async createMediaDirectory(req, res, next) {
+    const { userWithSiteSessionData, githubSessionData } = res.locals
 
-    const { siteName } = req.params
     const { error } = CreateMediaDirectoryRequestSchema.validate(req.body)
     if (error) throw new BadRequestError(error.message)
     const { newDirectoryName, items } = req.body
     const createResp = await this.mediaDirectoryService.createMediaDirectory(
-      { siteName, accessToken, currentCommitSha, treeSha },
+      userWithSiteSessionData,
+      githubSessionData,
       {
         directoryName: newDirectoryName,
         objArray: items,
       }
     )
 
-    return res.status(200).json(createResp)
+    res.status(200).json(createResp)
+    return next()
   }
 
   // Rename resource category
-  async renameMediaDirectory(req, res) {
-    const { accessToken, currentCommitSha, treeSha } = res.locals
+  async renameMediaDirectory(req, res, next) {
+    const { userWithSiteSessionData, githubSessionData } = res.locals
 
-    const { siteName, directoryName } = req.params
+    const { directoryName } = req.params
     const { error } = RenameMediaDirectoryRequestSchema.validate(req.body)
     if (error) throw new BadRequestError(error.message)
     const { newDirectoryName } = req.body
     await this.mediaDirectoryService.renameMediaDirectory(
-      { siteName, accessToken, currentCommitSha, treeSha },
+      userWithSiteSessionData,
+      githubSessionData,
      {
        directoryName,
        newDirectoryName,
      }
    )
 
-    return res.status(200).send("OK")
+    res.status(200).send("OK")
+    return next()
  }
 
   // Delete resource category
-  async deleteMediaDirectory(req, res) {
-    const { accessToken, currentCommitSha, treeSha } = res.locals
+  async deleteMediaDirectory(req, res, next) {
+    const { userWithSiteSessionData, githubSessionData } = res.locals
 
-    const { siteName,
directoryName } = req.params + const { directoryName } = req.params await this.mediaDirectoryService.deleteMediaDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { directoryName, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } // Move resource category - async moveMediaFiles(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async moveMediaFiles(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, directoryName } = req.params + const { directoryName } = req.params const { error } = MoveMediaDirectoryFilesRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -98,14 +105,16 @@ class MediaCategoriesRouter { target: { directoryName: targetDirectoryName }, } = req.body await this.mediaDirectoryService.moveMediaFiles( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { directoryName, targetDirectoryName, objArray: items, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/mediaFiles.js b/src/routes/v2/authenticatedSites/mediaFiles.js index 368b0da13..6443930d2 100644 --- a/src/routes/v2/authenticatedSites/mediaFiles.js +++ b/src/routes/v2/authenticatedSites/mediaFiles.js @@ -23,31 +23,33 @@ class MediaFilesRouter { } // Create new page in collection - async createMediaFile(req, res) { - const { accessToken } = res.locals + async createMediaFile(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, directoryName } = req.params + const { directoryName } = req.params const { error } = CreateMediaFileRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { content, newFileName } = req.body - const reqDetails = { siteName, accessToken } - const createResp = await this.mediaFileService.create(reqDetails, { - fileName: newFileName, - directoryName, - content, - }) + const createResp = await this.mediaFileService.create( + userWithSiteSessionData, + { + fileName: newFileName, + directoryName, + content, + } + ) - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Read page in collection async readMediaFile(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, fileName, directoryName } = req.params + const { fileName, directoryName } = req.params - const reqDetails = { siteName, accessToken } - const readResp = await this.mediaFileService.read(reqDetails, { + const readResp = await this.mediaFileService.read(userWithSiteSessionData, { fileName, directoryName, }) @@ -55,50 +57,54 @@ class MediaFilesRouter { } // Update page in collection - async updateMediaFile(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async updateMediaFile(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, fileName, directoryName } = req.params + const { fileName, directoryName } = req.params const { error } = UpdateMediaFileRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { content, sha, newFileName } = req.body - const reqDetails = { siteName, accessToken, currentCommitSha, treeSha } let updateResp if (newFileName) { - updateResp = await 
this.mediaFileService.rename(reqDetails, { - oldFileName: fileName, - newFileName, - directoryName, - content, - sha, - }) + updateResp = await this.mediaFileService.rename( + userWithSiteSessionData, + githubSessionData, + { + oldFileName: fileName, + newFileName, + directoryName, + content, + sha, + } + ) } else { - updateResp = await this.mediaFileService.update(reqDetails, { + updateResp = await this.mediaFileService.update(userWithSiteSessionData, { fileName, directoryName, content, sha, }) } - return res.status(200).json(updateResp) + res.status(200).json(updateResp) + return next() } // Delete page in collection - async deleteMediaFile(req, res) { - const { accessToken } = res.locals + async deleteMediaFile(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, fileName, directoryName } = req.params + const { fileName, directoryName } = req.params const { error } = DeleteMediaFileRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { sha } = req.body - const reqDetails = { siteName, accessToken } - await this.mediaFileService.delete(reqDetails, { + await this.mediaFileService.delete(userWithSiteSessionData, { fileName, directoryName, sha, }) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/navigation.js b/src/routes/v2/authenticatedSites/navigation.js index 62a40154b..291942a04 100644 --- a/src/routes/v2/authenticatedSites/navigation.js +++ b/src/routes/v2/authenticatedSites/navigation.js @@ -20,34 +20,32 @@ class NavigationRouter { // Read navigation file async readNavigation(req, res) { - const { siteName } = req.params - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const readResp = await this.navigationYmlService.read({ - siteName, - accessToken, - }) + const readResp = await this.navigationYmlService.read( + userWithSiteSessionData + ) return res.status(200).json(readResp) } // Update navigation index file - async updateNavigation(req, res) { + async updateNavigation(req, res, next) { const { error } = UpdateNavigationRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { - params: { siteName }, body: { content: fileContent, sha }, } = req - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals const updatedNavigationPage = await this.navigationYmlService.update( - { siteName, accessToken }, + userWithSiteSessionData, { fileContent, sha } ) - return res.status(200).json(updatedNavigationPage) + res.status(200).json(updatedNavigationPage) + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/resourceCategories.js b/src/routes/v2/authenticatedSites/resourceCategories.js index 9d5e8eb30..19dce7207 100644 --- a/src/routes/v2/authenticatedSites/resourceCategories.js +++ b/src/routes/v2/authenticatedSites/resourceCategories.js @@ -24,45 +24,47 @@ class ResourceCategoriesRouter { // List files in a resource category async listResourceDirectoryFiles(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName, resourceCategoryName } = req.params + const { resourceRoomName, resourceCategoryName } = req.params const listResp = await this.resourceDirectoryService.listFiles( - { siteName, accessToken }, + userWithSiteSessionData, { resourceRoomName, resourceCategoryName } ) return res.status(200).json(listResp) } // Create new 
resource category - async createResourceDirectory(req, res) { - const { accessToken } = res.locals + async createResourceDirectory(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName } = req.params + const { resourceRoomName } = req.params const { error } = CreateResourceDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName } = req.body const createResp = await this.resourceDirectoryService.createResourceDirectory( - { siteName, accessToken }, + userWithSiteSessionData, { resourceRoomName, resourceCategoryName: newDirectoryName, } ) - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Rename resource category - async renameResourceDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async renameResourceDirectory(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, resourceRoomName, resourceCategoryName } = req.params + const { resourceRoomName, resourceCategoryName } = req.params const { error } = RenameResourceDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName } = req.body await this.resourceDirectoryService.renameResourceDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { resourceRoomName, resourceCategoryName, @@ -70,29 +72,32 @@ class ResourceCategoriesRouter { } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } // Delete resource category - async deleteResourceDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async deleteResourceDirectory(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, resourceRoomName, resourceCategoryName } = req.params + const { resourceRoomName, resourceCategoryName } = req.params await this.resourceDirectoryService.deleteResourceDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { resourceRoomName, resourceCategoryName, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } // Move resource category - async moveResourceDirectoryPages(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async moveResourceDirectoryPages(req, res, next) { + const { userWithSiteSessionData, githubSessionData } = res.locals - const { siteName, resourceRoomName, resourceCategoryName } = req.params + const { resourceRoomName, resourceCategoryName } = req.params const { error } = MoveResourceDirectoryPagesRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -100,7 +105,8 @@ class ResourceCategoriesRouter { target: { resourceCategoryName: targetResourceCategory }, } = req.body await this.resourceDirectoryService.moveResourcePages( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, + githubSessionData, { resourceRoomName, resourceCategoryName, @@ -108,7 +114,8 @@ class ResourceCategoriesRouter { objArray: items, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/resourcePages.js b/src/routes/v2/authenticatedSites/resourcePages.js index cd1f3c659..c4111e1bf 100644 --- 
a/src/routes/v2/authenticatedSites/resourcePages.js +++ b/src/routes/v2/authenticatedSites/resourcePages.js @@ -6,7 +6,6 @@ const { BadRequestError } = require("@errors/BadRequestError") const { attachReadRouteHandlerWrapper, - attachWriteRouteHandlerWrapper, attachRollbackRouteHandlerWrapper, } = require("@middleware/routeHandler") @@ -24,59 +23,54 @@ class ResourcePagesRouter { } // Create new page in resource category - async createResourcePage(req, res) { - const { accessToken } = res.locals + async createResourcePage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName, resourceCategoryName } = req.params + const { resourceRoomName, resourceCategoryName } = req.params const { error } = CreateResourcePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { content: { frontMatter, pageBody }, newFileName, } = req.body - const reqDetails = { siteName, accessToken } - const createResp = await this.resourcePageService.create(reqDetails, { - fileName: newFileName, - resourceRoomName, - resourceCategoryName, - content: pageBody, - frontMatter, - }) + const createResp = await this.resourcePageService.create( + userWithSiteSessionData, + { + fileName: newFileName, + resourceRoomName, + resourceCategoryName, + content: pageBody, + frontMatter, + } + ) - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Read page in resource category async readResourcePage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { - siteName, - resourceRoomName, - resourceCategoryName, - pageName, - } = req.params + const { resourceRoomName, resourceCategoryName, pageName } = req.params - const reqDetails = { siteName, accessToken } - const readResp = await this.resourcePageService.read(reqDetails, { - fileName: pageName, - resourceRoomName, - resourceCategoryName, - }) + const readResp = await this.resourcePageService.read( + userWithSiteSessionData, + { + fileName: pageName, + resourceRoomName, + resourceCategoryName, + } + ) return res.status(200).json(readResp) } // Update page in resource category - async updateResourcePage(req, res) { - const { accessToken } = res.locals + async updateResourcePage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { - siteName, - resourceRoomName, - resourceCategoryName, - pageName, - } = req.params + const { resourceRoomName, resourceCategoryName, pageName } = req.params const { error } = UpdateResourcePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -84,53 +78,54 @@ class ResourcePagesRouter { sha, newFileName, } = req.body - const reqDetails = { siteName, accessToken } let updateResp if (newFileName) { - updateResp = await this.resourcePageService.rename(reqDetails, { - oldFileName: pageName, - newFileName, - resourceRoomName, - resourceCategoryName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.resourcePageService.rename( + userWithSiteSessionData, + { + oldFileName: pageName, + newFileName, + resourceRoomName, + resourceCategoryName, + content: pageBody, + frontMatter, + sha, + } + ) } else { - updateResp = await this.resourcePageService.update(reqDetails, { - fileName: pageName, - resourceRoomName, - resourceCategoryName, - content: pageBody, - frontMatter, - sha, - }) + updateResp = await this.resourcePageService.update( + userWithSiteSessionData, + { + fileName: pageName, + 
resourceRoomName, + resourceCategoryName, + content: pageBody, + frontMatter, + sha, + } + ) } - return res.status(200).json(updateResp) + res.status(200).json(updateResp) + return next() } // Delete page in resource category - async deleteResourcePage(req, res) { - const { accessToken } = res.locals + async deleteResourcePage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { - siteName, - resourceRoomName, - resourceCategoryName, - pageName, - } = req.params + const { resourceRoomName, resourceCategoryName, pageName } = req.params const { error } = DeleteResourcePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { sha } = req.body - const reqDetails = { siteName, accessToken } - await this.resourcePageService.delete(reqDetails, { + await this.resourcePageService.delete(userWithSiteSessionData, { fileName: pageName, resourceRoomName, resourceCategoryName, sha, }) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/resourceRoom.js b/src/routes/v2/authenticatedSites/resourceRoom.js index 59ef3666d..f50d06f34 100644 --- a/src/routes/v2/authenticatedSites/resourceRoom.js +++ b/src/routes/v2/authenticatedSites/resourceRoom.js @@ -23,11 +23,10 @@ class ResourceRoomRouter { // Get resource room name async getResourceRoomDirectoryName(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params const getResp = await this.resourceRoomDirectoryService.getResourceRoomDirectoryName( - { siteName, accessToken } + userWithSiteSessionData ) return res.status(200).json(getResp) @@ -35,14 +34,11 @@ class ResourceRoomRouter { // List all resource categories async listAllResourceCategories(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName } = req.params + const { resourceRoomName } = req.params const listResp = await this.resourceRoomDirectoryService.listAllResourceCategories( - { - siteName, - accessToken, - }, + userWithSiteSessionData, { resourceRoomName, } @@ -52,49 +48,50 @@ class ResourceRoomRouter { } // Create new resource room - async createResourceRoomDirectory(req, res) { - const { accessToken } = res.locals + async createResourceRoomDirectory(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params const { error } = CreateResourceDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName } = req.body const createResp = await this.resourceRoomDirectoryService.createResourceRoomDirectory( - { siteName, accessToken }, + userWithSiteSessionData, { resourceRoomName: newDirectoryName, } ) - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } // Rename resource room - async renameResourceRoomDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + async renameResourceRoomDirectory(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName } = req.params + const { resourceRoomName } = req.params const { error } = RenameResourceDirectoryRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { newDirectoryName } = req.body await this.resourceRoomDirectoryService.renameResourceRoomDirectory( - { siteName, accessToken, currentCommitSha, treeSha 
}, + userWithSiteSessionData, { resourceRoomName, newDirectoryName, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } // Delete resource room async deleteResourceRoomDirectory(req, res) { - const { accessToken, currentCommitSha, treeSha } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, resourceRoomName } = req.params + const { resourceRoomName } = req.params await this.resourceRoomDirectoryService.deleteResourceRoomDirectory( - { siteName, accessToken, currentCommitSha, treeSha }, + userWithSiteSessionData, { resourceRoomName, } diff --git a/src/routes/v2/authenticatedSites/settings.js b/src/routes/v2/authenticatedSites/settings.js index e81bbc13c..8d5b87850 100644 --- a/src/routes/v2/authenticatedSites/settings.js +++ b/src/routes/v2/authenticatedSites/settings.js @@ -22,15 +22,15 @@ class SettingsRouter { } async readSettingsPage(req, res) { - const { accessToken } = res.locals - const { siteName } = req.params - const reqDetails = { siteName, accessToken } + const { userWithSiteSessionData } = res.locals const { config, footer, navigation, - } = await this.settingsService.retrieveSettingsFiles(reqDetails) + } = await this.settingsService.retrieveSettingsFiles( + userWithSiteSessionData + ) return res.status(200).json({ ...SettingsService.extractConfigFields(config), @@ -39,22 +39,22 @@ class SettingsRouter { }) } - async updateSettingsPage(req, res) { + async updateSettingsPage(req, res, next) { const { body } = req - const { accessToken } = res.locals - const { siteName } = req.params + const { userWithSiteSessionData } = res.locals const { error } = UpdateSettingsRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) - const reqDetails = { siteName, accessToken } - const { config, footer, navigation, homepage, - } = await this.settingsService.retrieveSettingsFiles(reqDetails, true) + } = await this.settingsService.retrieveSettingsFiles( + userWithSiteSessionData, + true + ) // extract data const settings = body @@ -64,8 +64,7 @@ class SettingsRouter { navigationContent: updatedNavigationContent, } = SettingsService.retrieveSettingsFields(settings) - await this.settingsService.updateSettingsFiles({ - reqDetails, + await this.settingsService.updateSettingsFiles(userWithSiteSessionData, { config, homepage, footer, @@ -74,7 +73,8 @@ class SettingsRouter { updatedFooterContent, updatedNavigationContent, }) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/routes/v2/authenticatedSites/unlinkedPages.js b/src/routes/v2/authenticatedSites/unlinkedPages.js index 2b1f715c2..0b82a7ca6 100644 --- a/src/routes/v2/authenticatedSites/unlinkedPages.js +++ b/src/routes/v2/authenticatedSites/unlinkedPages.js @@ -25,20 +25,18 @@ class UnlinkedPagesRouter { } async listAllUnlinkedPages(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params const listResp = await this.unlinkedPagesDirectoryService.listAllUnlinkedPages( - { siteName, accessToken } + userWithSiteSessionData ) return res.status(200).json(listResp) } - async createUnlinkedPage(req, res) { - const { accessToken } = res.locals + async createUnlinkedPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params const { error } = CreatePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -46,7 +44,7 @@ class 
UnlinkedPagesRouter { newFileName, } = req.body const createResp = await this.unlinkedPageService.create( - { siteName, accessToken }, + userWithSiteSessionData, { fileName: newFileName, content: pageBody, @@ -54,25 +52,28 @@ class UnlinkedPagesRouter { } ) - return res.status(200).json(createResp) + res.status(200).json(createResp) + return next() } async readUnlinkedPage(req, res) { - const { accessToken } = res.locals + const { userWithSiteSessionData } = res.locals - const { siteName, pageName } = req.params + const { pageName } = req.params const { sha, content } = await this.unlinkedPageService.read( - { siteName, accessToken }, - { fileName: pageName } + userWithSiteSessionData, + { + fileName: pageName, + } ) return res.status(200).json({ pageName, sha, content }) } - async updateUnlinkedPage(req, res) { - const { accessToken } = res.locals + async updateUnlinkedPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, pageName } = req.params + const { pageName } = req.params const { error } = UpdatePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -84,7 +85,7 @@ class UnlinkedPagesRouter { let updateResp if (newFileName) { updateResp = await this.unlinkedPageService.rename( - { siteName, accessToken }, + userWithSiteSessionData, { oldFileName: pageName, newFileName, @@ -95,7 +96,7 @@ class UnlinkedPagesRouter { ) } else { updateResp = await this.unlinkedPageService.update( - { siteName, accessToken }, + userWithSiteSessionData, { fileName: pageName, content: pageBody, @@ -105,31 +106,29 @@ class UnlinkedPagesRouter { ) } - return res.status(200).json(updateResp) + res.status(200).json(updateResp) + return next() } - async deleteUnlinkedPage(req, res) { - const { accessToken } = res.locals + async deleteUnlinkedPage(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName, pageName } = req.params + const { pageName } = req.params const { error } = DeletePageRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { sha } = req.body - await this.unlinkedPageService.delete( - { siteName, accessToken }, - { - fileName: pageName, - sha, - } - ) + await this.unlinkedPageService.delete(userWithSiteSessionData, { + fileName: pageName, + sha, + }) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } - async moveUnlinkedPages(req, res) { - const { accessToken } = res.locals + async moveUnlinkedPages(req, res, next) { + const { userWithSiteSessionData } = res.locals - const { siteName } = req.params const { error } = MoveDirectoryPagesRequestSchema.validate(req.body) if (error) throw new BadRequestError(error.message) const { @@ -140,14 +139,15 @@ class UnlinkedPagesRouter { }, } = req.body await this.unlinkedPagesDirectoryService.movePages( - { siteName, accessToken }, + userWithSiteSessionData, { targetCollectionName, targetSubcollectionName, objArray: items, } ) - return res.status(200).send("OK") + res.status(200).send("OK") + return next() } getRouter() { diff --git a/src/server.js b/src/server.js index d2190ed32..f8e80b693 100644 --- a/src/server.js +++ b/src/server.js @@ -1,5 +1,10 @@ import "dd-trace/init" import "module-alias/register" +import SequelizeStoreFactory from "connect-session-sequelize" +import session from "express-session" +import nocache from "nocache" + +import { config } from "@config/config" import logger from "@logger/logger" @@ -11,43 +16,70 @@ import { Whitelist, AccessToken, Repo, + Otp, 
Deployment, Launch, Redirection, + IsomerAdmin, + Notification, + ReviewRequest, + ReviewMeta, + Reviewer, + ReviewRequestView, } from "@database/models" import bootstrap from "@root/bootstrap" -import { getAuthMiddleware } from "@root/middleware" +import { + getAuthenticationMiddleware, + getAuthorizationMiddleware, +} from "@root/middleware" import { isomerRepoAxiosInstance } from "@services/api/AxiosInstance" import { getIdentityAuthService, getUsersService, - sitesService, + isomerAdminsService, + notificationsService, } from "@services/identity" import DeploymentsService from "@services/identity/DeploymentsService" import QueueService from "@services/identity/QueueService" import ReposService from "@services/identity/ReposService" +import SitesService from "@services/identity/SitesService" import InfraService from "@services/infra/InfraService" +import ReviewRequestService from "@services/review/ReviewRequestService" import { apiLogger } from "./middleware/apiLogger" +import { NotificationOnEditHandler } from "./middleware/notificationOnEditHandler" import getAuthenticatedSubrouterV1 from "./routes/v1/authenticated" import getAuthenticatedSitesSubrouterV1 from "./routes/v1/authenticatedSites" import getAuthenticatedSubrouter from "./routes/v2/authenticated" +import { ReviewsRouter } from "./routes/v2/authenticated/review" import getAuthenticatedSitesSubrouter from "./routes/v2/authenticatedSites" +import CollaboratorsService from "./services/identity/CollaboratorsService" import LaunchClient from "./services/identity/LaunchClient" import LaunchesService from "./services/identity/LaunchesService" +import { rateLimiter } from "./services/utilServices/RateLimiter" +import { isSecure } from "./utils/auth-utils" const path = require("path") +const AUTH_TOKEN_EXPIRY_MS = config.get("auth.tokenExpiry") + const sequelize = initSequelize([ Site, SiteMember, User, Whitelist, AccessToken, + Otp, Repo, Deployment, Launch, Redirection, + IsomerAdmin, + Notification, + ReviewMeta, + Reviewer, + ReviewRequest, + ReviewRequestView, ]) const usersService = getUsersService(sequelize) @@ -57,9 +89,29 @@ const express = require("express") const helmet = require("helmet") const createError = require("http-errors") -// Env vars -const { FRONTEND_URL } = process.env +const SESSION_SECRET = config.get("auth.sessionSecret") +const SequelizeStore = SequelizeStoreFactory(session.Store) +const sessionMiddleware = session({ + store: new SequelizeStore({ + db: sequelize, + tableName: "sessions", + checkExpirationInterval: 15 * 60 * 1000, // Checks expired sessions every 15 minutes + }), + resave: false, // can set to false since touch is implemented by our store + saveUninitialized: false, // do not save new sessions that have not been modified + cookie: { + httpOnly: true, + sameSite: "strict", + secure: isSecure, + maxAge: AUTH_TOKEN_EXPIRY_MS, + }, + secret: SESSION_SECRET, + name: "isomer", +}) + +// Env vars +const FRONTEND_URL = config.get("app.frontendUrl") // Import middleware // Import routes @@ -76,6 +128,26 @@ const { const { AuthService } = require("@services/utilServices/AuthService") const authService = new AuthService({ usersService }) +const gitHubService = new GitHubService({ + axiosInstance: isomerRepoAxiosInstance, +}) +const configYmlService = new ConfigYmlService({ gitHubService }) +const reviewRequestService = new ReviewRequestService( + gitHubService, + User, + ReviewRequest, + Reviewer, + ReviewMeta, + ReviewRequestView +) +const sitesService = new SitesService({ + siteRepository: Site, + 
gitHubService, + configYmlService, + usersService, + isomerAdminsService, + reviewRequestService, +}) const reposService = new ReposService({ repository: Repo }) const deploymentsService = new DeploymentsService({ repository: Deployment }) const launchClient = new LaunchClient() @@ -98,40 +170,76 @@ const infraService = new InfraService({ // poller for incoming queue infraService.pollQueue() -const gitHubService = new GitHubService({ - axiosInstance: isomerRepoAxiosInstance, -}) const identityAuthService = getIdentityAuthService(gitHubService) -const configYmlService = new ConfigYmlService({ gitHubService }) +const collaboratorsService = new CollaboratorsService({ + siteRepository: Site, + siteMemberRepository: SiteMember, + sitesService, + usersService, + whitelist: Whitelist, +}) -const authMiddleware = getAuthMiddleware({ identityAuthService }) +const authenticationMiddleware = getAuthenticationMiddleware() +const authorizationMiddleware = getAuthorizationMiddleware({ + identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, +}) +const notificationOnEditHandler = new NotificationOnEditHandler({ + reviewRequestService, + sitesService, + collaboratorsService, + notificationsService, +}) +const reviewRouter = new ReviewsRouter( + reviewRequestService, + usersService, + sitesService, + collaboratorsService, + notificationsService +) const authenticatedSubrouterV1 = getAuthenticatedSubrouterV1({ - authMiddleware, + authenticationMiddleware, usersService, apiLogger, }) const authenticatedSitesSubrouterV1 = getAuthenticatedSitesSubrouterV1({ - authMiddleware, + authenticationMiddleware, + authorizationMiddleware, apiLogger, }) const authenticatedSubrouterV2 = getAuthenticatedSubrouter({ - authMiddleware, - gitHubService, - configYmlService, + authenticationMiddleware, + sitesService, usersService, reposService, deploymentsService, apiLogger, + isomerAdminsService, + collaboratorsService, + authorizationMiddleware, + reviewRouter, + notificationsService, }) + const authenticatedSitesSubrouterV2 = getAuthenticatedSitesSubrouter({ - authMiddleware, + authorizationMiddleware, + authenticationMiddleware, gitHubService, configYmlService, apiLogger, + notificationsService, + notificationOnEditHandler, +}) +const authV2Router = new AuthRouter({ + authenticationMiddleware, + authService, + apiLogger, + rateLimiter, }) -const authV2Router = new AuthRouter({ authMiddleware, authService, apiLogger }) const formsgRouter = new FormsgRouter({ usersService, infraService }) const formsgSiteLaunchRouter = new FormsgSiteLaunchRouter({ usersService, @@ -139,6 +247,12 @@ const formsgSiteLaunchRouter = new FormsgSiteLaunchRouter({ }) const app = express() + +if (isSecure) { + // Our server only receives requests from the alb reverse proxy, so we need to use the client IP provided in X-Forwarded-For + // This is trusted because our security groups block all other access to the server + app.set("trust proxy", true) +} app.use(helmet()) app.use( @@ -151,6 +265,9 @@ app.use(express.json({ limit: "7mb" })) app.use(express.urlencoded({ extended: false })) app.use(cookieParser()) app.use(express.static(path.join(__dirname, "public"))) +app.use(nocache()) + +app.use(sessionMiddleware) // Health endpoint app.use("/v2/ping", (req, res, next) => res.status(200).send("Ok")) @@ -164,10 +281,10 @@ app.use("/v1/sites/:siteName", authenticatedSitesSubrouterV1) app.use("/v1", authenticatedSubrouterV1) app.use("/v2/auth", authV2Router.getRouter()) -// Endpoints which have siteName, used to inject site 
access token -app.use("/v2/sites/:siteName", authenticatedSitesSubrouterV2) // Endpoints which have require login, but not site access token app.use("/v2", authenticatedSubrouterV2) +// Endpoints which modify the github repo, used to inject site access token +app.use("/v2/sites/:siteName", authenticatedSitesSubrouterV2) // FormSG Backend handler routes app.use("/formsg", formsgRouter.getRouter()) diff --git a/src/services/api/AxiosInstance.ts b/src/services/api/AxiosInstance.ts index 7950d372f..b9c7a927f 100644 --- a/src/services/api/AxiosInstance.ts +++ b/src/services/api/AxiosInstance.ts @@ -1,12 +1,28 @@ import axios, { AxiosRequestConfig, AxiosResponse } from "axios" +import { config } from "@config/config" + import logger from "@logger/logger" +import { getAccessToken } from "@utils/token-retrieval-utils" + // Env vars -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") -const requestFormatter = (config: AxiosRequestConfig) => { +const requestFormatter = async (config: AxiosRequestConfig) => { logger.info("Making GitHub API call") + + const authMessage = config.headers.Authorization + + // If accessToken is missing, authMessage is `token ` + if ( + !authMessage || + authMessage === "token " || + authMessage === "token undefined" + ) { + const accessToken = await getAccessToken() + config.headers.Authorization = `token ${accessToken}` + } return { ...config, headers: { diff --git a/src/services/configServices/NetlifyTomlService.js b/src/services/configServices/NetlifyTomlService.js index bcb4df156..eeff13441 100644 --- a/src/services/configServices/NetlifyTomlService.js +++ b/src/services/configServices/NetlifyTomlService.js @@ -1,5 +1,7 @@ const toml = require("toml") +const { config } = require("@config/config") + // Import error types const { NotFoundError } = require("@errors/NotFoundError") @@ -9,10 +11,12 @@ const { genericGitHubAxiosInstance, } = require("@root/services/api/AxiosInstance") -const { GITHUB_BUILD_ORG_NAME, GITHUB_BUILD_REPO_NAME } = process.env +const GITHUB_BUILD_ORG_NAME = config.get("github.buildOrgName") +const GITHUB_BUILD_REPO_NAME = config.get("github.buildRepo") class NetlifyTomlService { - async read({ accessToken }) { + async read(sessionData) { + const { accessToken } = sessionData const endpoint = `https://api.github.com/repos/${GITHUB_BUILD_ORG_NAME}/${GITHUB_BUILD_REPO_NAME}/contents/overrides/netlify.toml` const resp = await genericGitHubAxiosInstance.get(endpoint, { diff --git a/src/services/configServices/SettingsService.js b/src/services/configServices/SettingsService.js index 89bf87048..1678d6c91 100644 --- a/src/services/configServices/SettingsService.js +++ b/src/services/configServices/SettingsService.js @@ -17,12 +17,12 @@ class SettingsService { autoBind(this) } - async retrieveSettingsFiles(reqDetails, shouldRetrieveHomepage) { + async retrieveSettingsFiles(sessionData, shouldRetrieveHomepage) { const fileRetrievalObj = { - config: this.configYmlService.read(reqDetails), - footer: this.footerYmlService.read(reqDetails), - navigation: this.navYmlService.read(reqDetails), - homepage: this.homepagePageService.read(reqDetails), + config: this.configYmlService.read(sessionData), + footer: this.footerYmlService.read(sessionData), + navigation: this.navYmlService.read(sessionData), + homepage: this.homepagePageService.read(sessionData), } const [config, footer, navigation, homepage] = await Bluebird.map( @@ -43,16 +43,18 @@ class SettingsService { } } - async updateSettingsFiles({ - reqDetails, - config, 
- homepage, - footer, - navigation, - updatedConfigContent, - updatedFooterContent, - updatedNavigationContent, - }) { + async updateSettingsFiles( + sessionData, + { + config, + homepage, + footer, + navigation, + updatedConfigContent, + updatedFooterContent, + updatedNavigationContent, + } + ) { if (!_.isEmpty(updatedConfigContent)) { const mergedConfigContent = this.mergeUpdatedData( config.content, @@ -66,7 +68,7 @@ class SettingsService { mergedConfigContent.url = `https://${mergedConfigContent.url}` } - await this.configYmlService.update(reqDetails, { + await this.configYmlService.update(sessionData, { fileContent: mergedConfigContent, sha: config.sha, }) @@ -83,7 +85,7 @@ class SettingsService { updatedConfigContent.description if (updatedConfigContent.shareicon) updatedHomepageFrontMatter.image = updatedConfigContent.shareicon - await this.homepagePageService.update(reqDetails, { + await this.homepagePageService.update(sessionData, { content: homepage.content.pageBody, frontMatter: updatedHomepageFrontMatter, sha: homepage.sha, @@ -96,7 +98,7 @@ class SettingsService { footer.content, updatedFooterContent ) - await this.footerYmlService.update(reqDetails, { + await this.footerYmlService.update(sessionData, { fileContent: mergedFooterContent, sha: footer.sha, }) @@ -107,7 +109,7 @@ class SettingsService { navigation.content, updatedNavigationContent ) - await this.navYmlService.update(reqDetails, { + await this.navYmlService.update(sessionData, { fileContent: mergedNavigationContent, sha: navigation.sha, }) diff --git a/src/services/configServices/__tests__/NetlifyTomlService.spec.js b/src/services/configServices/__tests__/NetlifyTomlService.spec.js index de3e41800..c304a47f4 100644 --- a/src/services/configServices/__tests__/NetlifyTomlService.spec.js +++ b/src/services/configServices/__tests__/NetlifyTomlService.spec.js @@ -1,7 +1,13 @@ const { Base64 } = require("js-base64") +const { config } = require("@config/config") + const validateStatus = require("@utils/axios-utils") +const { + mockUserWithSiteSessionData, + mockAccessToken, +} = require("@fixtures/sessionData") const { netlifyTomlContent, netlifyTomlHeaderValues, @@ -10,13 +16,10 @@ const { genericGitHubAxiosInstance, } = require("@root/services/api/AxiosInstance") -const { GITHUB_BUILD_ORG_NAME, GITHUB_BUILD_REPO_NAME } = process.env +const GITHUB_BUILD_ORG_NAME = config.get("github.buildOrgName") +const GITHUB_BUILD_REPO_NAME = config.get("github.buildRepo") describe("NetlifyToml Service", () => { - const accessToken = "test-token" - - const reqDetails = { accessToken } - const { NetlifyTomlService, } = require("@services/configServices/NetlifyTomlService") @@ -35,7 +38,7 @@ describe("NetlifyToml Service", () => { }, })) - await expect(service.read(reqDetails)).resolves.toEqual( + await expect(service.read(mockUserWithSiteSessionData)).resolves.toEqual( netlifyTomlHeaderValues ) @@ -44,7 +47,7 @@ describe("NetlifyToml Service", () => { { validateStatus, headers: { - Authorization: `token ${accessToken}`, + Authorization: `token ${mockAccessToken}`, }, } ) diff --git a/src/services/configServices/__tests__/SettingsService.spec.js b/src/services/configServices/__tests__/SettingsService.spec.js index 8f7fcd384..e26f9352e 100644 --- a/src/services/configServices/__tests__/SettingsService.spec.js +++ b/src/services/configServices/__tests__/SettingsService.spec.js @@ -2,14 +2,11 @@ const { configContent, configSha } = require("@fixtures/config") const { footerContent, footerSha } = require("@fixtures/footer") const { 
homepageContent, homepageSha } = require("@fixtures/homepage") const { navigationContent, navigationSha } = require("@fixtures/navigation") +const { mockUserWithSiteSessionData } = require("@fixtures/sessionData") const { SettingsService } = require("../SettingsService") describe("Settings Service", () => { - const siteName = "test-site" - const accessToken = "test-token" - const reqDetails = { siteName, accessToken } - const config = { content: configContent, sha: configSha, @@ -66,7 +63,7 @@ describe("Settings Service", () => { it("retrieves settings data without homepage", async () => { await expect( - service.retrieveSettingsFiles(reqDetails) + service.retrieveSettingsFiles(mockUserWithSiteSessionData) ).resolves.toMatchObject({ config, footer, @@ -80,7 +77,7 @@ describe("Settings Service", () => { it("retrieves settings data with homepage", async () => { await expect( - service.retrieveSettingsFiles(reqDetails, true) + service.retrieveSettingsFiles(mockUserWithSiteSessionData, true) ).resolves.toMatchObject({ config, footer, @@ -121,8 +118,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -134,7 +130,7 @@ describe("Settings Service", () => { ).resolves.not.toThrow() expect(mockConfigYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedConfigServiceInput ) expect(mockFooterYmlService.update).not.toHaveBeenCalled() @@ -165,8 +161,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -178,13 +173,13 @@ describe("Settings Service", () => { ).resolves.not.toThrow() expect(mockConfigYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedConfigServiceInput ) expect(mockFooterYmlService.update).not.toHaveBeenCalled() expect(mockNavYmlService.update).not.toHaveBeenCalled() expect(mockHomepagePageService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedHomepageServiceInput ) }) @@ -212,8 +207,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -225,13 +219,13 @@ describe("Settings Service", () => { ).resolves.not.toThrow() expect(mockConfigYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedConfigServiceInput ) expect(mockFooterYmlService.update).not.toHaveBeenCalled() expect(mockNavYmlService.update).not.toHaveBeenCalled() expect(mockHomepagePageService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedHomepageServiceInput ) }) @@ -251,8 +245,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -265,7 +258,7 @@ describe("Settings Service", () => { expect(mockConfigYmlService.update).toHaveBeenCalledTimes(0) expect(mockFooterYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedFooterServiceInput ) expect(mockNavYmlService.update).toHaveBeenCalledTimes(0) @@ -287,8 +280,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + 
service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -302,7 +294,7 @@ describe("Settings Service", () => { expect(mockConfigYmlService.update).not.toHaveBeenCalled() expect(mockFooterYmlService.update).not.toHaveBeenCalled() expect(mockNavYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedNavigationServiceInput ) expect(mockHomepagePageService.update).not.toHaveBeenCalled() @@ -352,8 +344,7 @@ describe("Settings Service", () => { } await expect( - service.updateSettingsFiles({ - reqDetails, + service.updateSettingsFiles(mockUserWithSiteSessionData, { config, homepage, footer, @@ -365,19 +356,19 @@ describe("Settings Service", () => { ).resolves.not.toThrow() expect(mockConfigYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedConfigServiceInput ) expect(mockFooterYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedFooterServiceInput ) expect(mockNavYmlService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedNavigationServiceInput ) expect(mockHomepagePageService.update).toHaveBeenLastCalledWith( - reqDetails, + mockUserWithSiteSessionData, expectedHomepageServiceInput ) }) diff --git a/src/services/db/GitHubService.js b/src/services/db/GitHubService.js index db3532c49..5fe7df9e0 100644 --- a/src/services/db/GitHubService.js +++ b/src/services/db/GitHubService.js @@ -1,7 +1,5 @@ const { Base64 } = require("js-base64") -const validateStatus = require("@utils/axios-utils") - const BRANCH_REF = "staging" const { @@ -9,12 +7,62 @@ const { inputNameConflictErrorMsg, } = require("@errors/ConflictError") const { NotFoundError } = require("@errors/NotFoundError") +const { UnprocessableError } = require("@errors/UnprocessableError") + +const validateStatus = require("@utils/axios-utils") + +const ReviewApi = require("./review") class GitHubService { constructor({ axiosInstance }) { this.axiosInstance = axiosInstance } + getCommitDiff(siteName, base, head) { + return ReviewApi.getCommitDiff(siteName, base, head) + } + + createPullRequest(siteName, title, description) { + return ReviewApi.createPullRequest(siteName, title, description) + } + + getPullRequest(siteName, pullRequestNumber) { + return ReviewApi.getPullRequest(siteName, pullRequestNumber) + } + + getBlob(repo, path, ref) { + return ReviewApi.getBlob(repo, path, ref) + } + + updatePullRequest(siteName, pullRequestNumber, title, description) { + return ReviewApi.updatePullRequest( + siteName, + pullRequestNumber, + title, + description + ) + } + + closeReviewRequest(siteName, pullRequestNumber) { + return ReviewApi.closeReviewRequest(siteName, pullRequestNumber) + } + + mergePullRequest(siteName, pullRequestNumber) { + return ReviewApi.mergePullRequest(siteName, pullRequestNumber) + } + + approvePullRequest(siteName, pullRequestNumber) { + return ReviewApi.approvePullRequest(siteName, pullRequestNumber) + } + + async getComments(siteName, pullRequestNumber) { + return ReviewApi.getComments(siteName, pullRequestNumber) + } + + async createComment(siteName, pullRequestNumber, user, message) { + return ReviewApi.createComment(siteName, pullRequestNumber, user, message) + } + getFilePath({ siteName, fileName, directoryName }) { if (!directoryName) return `${siteName}/contents/${encodeURIComponent(fileName)}` @@ -40,16 +88,22 @@ class GitHubService { } async create( - { accessToken, siteName }, + sessionData, { content, fileName, 
directoryName, isMedia = false } ) { + const { accessToken, siteName, isomerUserId: userId } = sessionData try { const endpoint = this.getFilePath({ siteName, fileName, directoryName }) // Validation and sanitisation of media already done const encodedContent = isMedia ? content : Base64.encode(content) - const params = { + const message = JSON.stringify({ message: `Create file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, content: encodedContent, branch: BRANCH_REF, } @@ -69,7 +123,9 @@ class GitHubService { } } - async read({ accessToken, siteName }, { fileName, directoryName }) { + async read(sessionData, { fileName, directoryName }) { + const { accessToken } = sessionData + const { siteName } = sessionData const endpoint = this.getFilePath({ siteName, fileName, directoryName }) const params = { @@ -91,12 +147,14 @@ class GitHubService { return { content, sha } } - async readMedia({ accessToken, siteName }, { fileSha }) { + async readMedia(sessionData, { fileSha }) { /** * Files that are bigger than 1 MB needs to be retrieved * via Github Blob API. The content can only be retrieved through * the `sha` of the file. */ + const { accessToken } = sessionData + const { siteName } = sessionData const params = { ref: BRANCH_REF, } @@ -119,7 +177,9 @@ class GitHubService { return { content, sha } } - async readDirectory({ accessToken, siteName }, { directoryName }) { + async readDirectory(sessionData, { directoryName }) { + const { accessToken } = sessionData + const { siteName } = sessionData const endpoint = this.getFolderPath({ siteName, directoryName }) const params = { @@ -138,25 +198,28 @@ class GitHubService { return resp.data } - async update( - { accessToken, siteName }, - { fileContent, sha, fileName, directoryName } - ) { + async update(sessionData, { fileContent, sha, fileName, directoryName }) { + const { accessToken, siteName, isomerUserId: userId } = sessionData try { const endpoint = this.getFilePath({ siteName, fileName, directoryName }) const encodedNewContent = Base64.encode(fileContent) let fileSha = sha if (!sha) { - const { sha: retrievedSha } = await this.read( - { accessToken, siteName }, - { fileName, directoryName } - ) + const { sha: retrievedSha } = await this.read(sessionData, { + fileName, + directoryName, + }) fileSha = retrievedSha } - const params = { + const message = JSON.stringify({ message: `Update file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, content: encodedNewContent, branch: BRANCH_REF, sha: fileSha, @@ -181,7 +244,8 @@ class GitHubService { } } - async delete({ accessToken, siteName }, { sha, fileName, directoryName }) { + async delete(sessionData, { sha, fileName, directoryName }) { + const { accessToken, siteName, isomerUserId: userId } = sessionData try { const endpoint = this.getFilePath({ siteName, fileName, directoryName }) @@ -195,8 +259,13 @@ class GitHubService { fileSha = retrievedSha } - const params = { + const message = JSON.stringify({ message: `Delete file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, branch: BRANCH_REF, sha: fileSha, } @@ -218,7 +287,9 @@ class GitHubService { } } - async getRepoInfo({ accessToken, siteName }) { + async getRepoInfo(sessionData) { + const { siteName } = sessionData + const { accessToken } = sessionData const endpoint = `${siteName}` const headers = { Authorization: `token ${accessToken}`, @@ -235,7 +306,9 @@ class GitHubService { return data } - async getRepoState({ accessToken, siteName }) { + async 
getRepoState(sessionData) { + const { accessToken } = sessionData + const { siteName } = sessionData const endpoint = `${siteName}/commits` const headers = { Authorization: `token ${accessToken}`, @@ -259,7 +332,31 @@ class GitHubService { return { treeSha, currentCommitSha } } - async getTree({ accessToken, siteName, treeSha }, { isRecursive }) { + async getLatestCommitOfBranch(sessionData, branch) { + const { accessToken, siteName } = sessionData + const endpoint = `${siteName}/commits/${branch}` + const headers = { + Authorization: `token ${accessToken}`, + } + // Get the commits of the repo + try { + const { data: latestCommit } = await this.axiosInstance.get(endpoint, { + headers, + }) + const { commit: latestCommitMeta } = latestCommit + return latestCommitMeta + } catch (err) { + const { status } = err.response + if (status === 422) + throw new UnprocessableError(`Branch ${branch} does not exist`) + throw err + } + } + + async getTree(sessionData, githubSessionData, { isRecursive }) { + const { accessToken } = sessionData + const { siteName } = sessionData + const { treeSha } = githubSessionData.getGithubState() const url = `${siteName}/git/trees/${treeSha}` const params = { @@ -278,10 +375,9 @@ class GitHubService { return gitTree } - async updateTree( - { accessToken, currentCommitSha, treeSha, siteName }, - { gitTree, message } - ) { + async updateTree(sessionData, githubSessionData, { gitTree, message }) { + const { accessToken, siteName, isomerUserId: userId } = sessionData + const { treeSha, currentCommitSha } = githubSessionData.getGithubState() const url = `${siteName}/git/trees` const headers = { @@ -303,10 +399,14 @@ class GitHubService { const commitEndpoint = `${siteName}/git/commits` + const stringifiedMessage = JSON.stringify({ + message: message || `isomerCMS updated ${siteName} state`, + userId, + }) const newCommitResp = await this.axiosInstance.post( commitEndpoint, { - message: message || `isomerCMS updated ${siteName} state`, + message: stringifiedMessage, tree: newTreeSha, parents: [currentCommitSha], }, @@ -318,7 +418,9 @@ class GitHubService { return newCommitSha } - async updateRepoState({ accessToken, siteName }, { commitSha }) { + async updateRepoState(sessionData, { commitSha }) { + const { accessToken } = sessionData + const { siteName } = sessionData const refEndpoint = `${siteName}/git/refs/heads/${BRANCH_REF}` const headers = { Authorization: `token ${accessToken}`, @@ -331,7 +433,10 @@ class GitHubService { ) } - async checkHasAccess({ accessToken, siteName }, { userId }) { + async checkHasAccess(sessionData) { + const { accessToken } = sessionData + const userId = sessionData.githubId + const { siteName } = sessionData const endpoint = `${siteName}/collaborators/${userId}` const headers = { diff --git a/src/services/db/__tests__/GitHubService.spec.js b/src/services/db/__tests__/GitHubService.spec.js index 069453dc1..65846c1d7 100644 --- a/src/services/db/__tests__/GitHubService.spec.js +++ b/src/services/db/__tests__/GitHubService.spec.js @@ -1,24 +1,36 @@ const { ConflictError } = require("@errors/ConflictError") const { NotFoundError } = require("@errors/NotFoundError") +const { UnprocessableError } = require("@errors/UnprocessableError") const validateStatus = require("@utils/axios-utils") +const { + mockUserWithSiteSessionData, + mockSiteName, + mockAccessToken, + mockTreeSha, + mockGithubId, + mockCurrentCommitSha, + mockGithubSessionData, + mockIsomerUserId, +} = require("@fixtures/sessionData") const { GitHubService } = 
require("@services/db/GitHubService") const BRANCH_REF = "staging" describe("Github Service", () => { - const siteName = "test-site" - const accessToken = "test-token" + const siteName = mockSiteName + const accessToken = mockAccessToken const fileName = "test-file" const collectionName = "collection" const subcollectionName = "subcollection" const directoryName = `_${collectionName}` const sha = "12345" - const treeSha = "98765" + const treeSha = mockTreeSha const content = "test-content" + const userId = mockIsomerUserId - const reqDetails = { siteName, accessToken } + const sessionData = mockUserWithSiteSessionData const authHeader = { headers: { @@ -89,8 +101,13 @@ describe("Github Service", () => { const endpoint = `${siteName}/contents/${directoryName}/${fileName}` const encodedContent = Base64.encode(content) - const params = { + const message = JSON.stringify({ message: `Create file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, content: encodedContent, branch: BRANCH_REF, } @@ -105,7 +122,7 @@ describe("Github Service", () => { } mockAxiosInstance.put.mockResolvedValueOnce(resp) await expect( - service.create(reqDetails, { + service.create(sessionData, { content, fileName, directoryName, @@ -130,7 +147,7 @@ describe("Github Service", () => { } mockAxiosInstance.put.mockResolvedValueOnce(resp) await expect( - service.create(reqDetails, { + service.create(sessionData, { content, fileName, directoryName, @@ -158,7 +175,7 @@ describe("Github Service", () => { throw err }) await expect( - service.create(reqDetails, { + service.create(sessionData, { content, fileName, directoryName, @@ -188,7 +205,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.read(reqDetails, { + service.read(sessionData, { fileName, directoryName, }) @@ -209,7 +226,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.read(reqDetails, { + service.read(sessionData, { fileName, directoryName, }) @@ -237,7 +254,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.readMedia(reqDetails, { + service.readMedia(sessionData, { fileSha: sha, }) ).resolves.toMatchObject({ @@ -257,7 +274,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.readMedia(reqDetails, { + service.readMedia(sessionData, { fileSha: sha, }) ).rejects.toThrowError(NotFoundError) @@ -282,7 +299,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.readDirectory(reqDetails, { + service.readDirectory(sessionData, { fileName, directoryName, }) @@ -300,7 +317,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.readDirectory(reqDetails, { + service.readDirectory(sessionData, { fileName, directoryName, }) @@ -316,8 +333,13 @@ describe("Github Service", () => { describe("Update", () => { const endpoint = `${siteName}/contents/${directoryName}/${fileName}` const encodedContent = Base64.encode(content) - const params = { + const message = JSON.stringify({ message: `Update file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, content: encodedContent, branch: BRANCH_REF, sha, @@ -333,7 +355,7 @@ describe("Github Service", () => { } mockAxiosInstance.put.mockResolvedValueOnce(resp) await expect( - service.update(reqDetails, { + 
service.update(sessionData, { fileName, directoryName, fileContent: content, @@ -358,7 +380,7 @@ describe("Github Service", () => { throw err }) await expect( - service.update(reqDetails, { + service.update(sessionData, { fileName, directoryName, fileContent: content, @@ -392,7 +414,7 @@ describe("Github Service", () => { mockAxiosInstance.get.mockResolvedValueOnce(getResp) mockAxiosInstance.put.mockResolvedValueOnce(putResp) await expect( - service.update(reqDetails, { + service.update(sessionData, { fileName, directoryName, fileContent: content, @@ -421,7 +443,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.update(reqDetails, { + service.update(sessionData, { fileName, directoryName, fileContent: content, @@ -437,14 +459,19 @@ describe("Github Service", () => { describe("Delete", () => { const endpoint = `${siteName}/contents/${directoryName}/${fileName}` - const params = { + const message = JSON.stringify({ message: `Delete file: ${fileName}`, + fileName, + userId, + }) + const params = { + message, branch: BRANCH_REF, sha, } it("Deleting a file works correctly", async () => { - await service.delete(reqDetails, { + await service.delete(sessionData, { fileName, directoryName, sha, @@ -464,7 +491,7 @@ describe("Github Service", () => { throw err }) await expect( - service.delete(reqDetails, { + service.delete(sessionData, { fileName, directoryName, sha, @@ -486,14 +513,14 @@ describe("Github Service", () => { ref: BRANCH_REF, } - it("Getting a repo state works correctly", async () => { + it("Getting repo info works correctly", async () => { const resp = { data: { private: true, }, } mockAxiosInstance.get.mockResolvedValueOnce(resp) - await service.getRepoInfo(reqDetails) + await service.getRepoInfo(sessionData) expect(mockAxiosInstance.get).toHaveBeenCalledWith(endpoint, { params, headers, @@ -523,7 +550,7 @@ describe("Github Service", () => { ], } mockAxiosInstance.get.mockResolvedValueOnce(resp) - await service.getRepoState(reqDetails) + await service.getRepoState(sessionData) expect(mockAxiosInstance.get).toHaveBeenCalledWith(endpoint, { params, headers, @@ -531,8 +558,69 @@ describe("Github Service", () => { }) }) + describe("getLatestCommitOfBranch", () => { + const endpoint = `${siteName}/commits/staging` + const headers = { + Authorization: `token ${accessToken}`, + } + + it("Getting the latest commit of branch works correctly", async () => { + const expected = { + author: { + name: "test", + }, + } + const resp = { + data: { + commit: expected, + }, + } + mockAxiosInstance.get.mockResolvedValueOnce(resp) + const actual = await service.getLatestCommitOfBranch( + sessionData, + "staging" + ) + expect(actual).toEqual(expected) + expect(mockAxiosInstance.get).toHaveBeenCalledWith(endpoint, { + headers, + }) + }) + + it("Getting an invalid branch should throw UnprocessableError", async () => { + mockAxiosInstance.get.mockImplementationOnce(() => { + const err = new Error() + err.response = { + status: 422, + } + throw err + }) + await expect( + service.getLatestCommitOfBranch(sessionData, "staging") + ).rejects.toThrowError(UnprocessableError) + expect(mockAxiosInstance.get).toHaveBeenCalledWith(endpoint, { + headers, + }) + }) + + it("Getting other kinds of errors should throw the original error", async () => { + mockAxiosInstance.get.mockImplementationOnce(() => { + const err = new Error() + err.response = { + status: 418, + } + throw err + }) + await expect( + service.getLatestCommitOfBranch(sessionData, "staging") 
+ ).rejects.toThrowError() + expect(mockAxiosInstance.get).toHaveBeenCalledWith(endpoint, { + headers, + }) + }) + }) + describe("GetTree", () => { - const url = `${siteName}/git/trees/${sha}` + const url = `${siteName}/git/trees/${treeSha}` const params = { ref: BRANCH_REF, @@ -544,7 +632,7 @@ describe("Github Service", () => { const tree = "test-tree" - it("Getting a repo state works correctly", async () => { + it("Getting a repo tree works correctly", async () => { const resp = { data: { tree, @@ -552,7 +640,7 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.getTree({ accessToken, siteName, treeSha: sha }, {}) + service.getTree(sessionData, mockGithubSessionData, {}) ).resolves.toEqual(tree) expect(mockAxiosInstance.get).toHaveBeenCalledWith(url, { params, @@ -568,10 +656,9 @@ describe("Github Service", () => { } mockAxiosInstance.get.mockResolvedValueOnce(resp) await expect( - service.getTree( - { accessToken, siteName, treeSha: sha }, - { isRecursive: true } - ) + service.getTree(sessionData, mockGithubSessionData, { + isRecursive: true, + }) ).resolves.toEqual(tree) expect(mockAxiosInstance.get).toHaveBeenCalledWith(url, { params: { @@ -592,6 +679,10 @@ describe("Github Service", () => { const secondSha = "second-sha" const gitTree = "git-tree" const message = "message" + const finalExpectedMessage = JSON.stringify({ + message, + userId, + }) const firstResp = { data: { sha: firstSha, @@ -607,10 +698,10 @@ describe("Github Service", () => { .mockResolvedValueOnce(firstResp) .mockResolvedValueOnce(secondResp) await expect( - service.updateTree( - { accessToken, siteName, currentCommitSha: sha, treeSha }, - { gitTree, message } - ) + service.updateTree(sessionData, mockGithubSessionData, { + gitTree, + message, + }) ).resolves.toEqual(secondSha) expect(mockAxiosInstance.post).toHaveBeenCalledWith( url, @@ -623,9 +714,9 @@ describe("Github Service", () => { expect(mockAxiosInstance.post).toHaveBeenCalledWith( commitEndpoint, { - message: message || `isomerCMS updated ${siteName} state`, + message: finalExpectedMessage, tree: firstSha, - parents: [sha], + parents: [mockCurrentCommitSha], }, authHeader ) @@ -636,10 +727,7 @@ describe("Github Service", () => { const refEndpoint = `${siteName}/git/refs/heads/${BRANCH_REF}` it("Updating a repo state works correctly", async () => { - await service.updateRepoState( - { accessToken, siteName }, - { commitSha: sha } - ) + await service.updateRepoState(sessionData, { commitSha: sha }) expect(mockAxiosInstance.patch).toHaveBeenCalledWith( refEndpoint, { sha, force: true }, @@ -649,14 +737,13 @@ describe("Github Service", () => { }) describe("checkHasAccess", () => { - const userId = "userId" - const refEndpoint = `${siteName}/collaborators/${userId}` + const refEndpoint = `${siteName}/collaborators/${mockGithubId}` const headers = { Authorization: `token ${accessToken}`, "Content-Type": "application/json", } it("Checks whether user has write access to site", async () => { - await service.checkHasAccess({ accessToken, siteName }, { userId }) + await service.checkHasAccess(sessionData) expect(mockAxiosInstance.get).toHaveBeenCalledWith(refEndpoint, { headers, }) diff --git a/src/services/db/review.ts b/src/services/db/review.ts new file mode 100644 index 000000000..ee4d2a8ba --- /dev/null +++ b/src/services/db/review.ts @@ -0,0 +1,146 @@ +import _ from "lodash" + +import { config } from "@config/config" + +import { + RawFileChangeInfo, + Commit, + RawPullRequest, + RawComment, + 
fromGithubCommitMessage, +} from "@root/types/github" + +import { isomerRepoAxiosInstance as axiosInstance } from "../api/AxiosInstance" + +const E2E_TEST_GH_TOKEN = config.get("cypress.e2eTestGithubToken") + +export const getCommitDiff = async ( + siteName: string, + base = "master", + head = "staging" +) => + axiosInstance + .get<{ files: RawFileChangeInfo[]; commits: Commit[] }>( + `${siteName}/compare/${base}...${head}` + ) + .then(({ data }) => data) + +export const createPullRequest = ( + siteName: string, + title: string, + description?: string, + base = "master", + head = "staging" +) => + axiosInstance + .post<{ number: number }>( + `${siteName}/pulls`, + // NOTE: only create body if a valid description is given + { title, base, head, ...(description && { body: description }) } + ) + .then(({ data }) => data.number) + +export const getPullRequest = (siteName: string, pullRequestNumber: number) => + axiosInstance + .get(`${siteName}/pulls/${pullRequestNumber}`) + .then(({ data }) => data) + +export const updatePullRequest = ( + siteName: string, + pullRequestNumber: number, + title: string, + description?: string +) => + axiosInstance.patch( + `${siteName}/pulls/${pullRequestNumber}`, + // NOTE: only create body if a valid description is given + { title, ...(description !== undefined && { body: description }) } + ) + +export const closeReviewRequest = ( + siteName: string, + pullRequestNumber: number +) => + axiosInstance.patch( + `${siteName}/pulls/${pullRequestNumber}`, + // NOTE: only create body if a valid description is given + { state: "closed" } + ) + +export const mergePullRequest = (siteName: string, pullRequestNumber: number) => + axiosInstance.put(`${siteName}/pulls/${pullRequestNumber}/merge`) + +export const approvePullRequest = ( + siteName: string, + pullRequestNumber: number +) => + axiosInstance.post( + `${siteName}/pulls/${pullRequestNumber}/reviews`, + { + event: "APPROVE", + }, + { + headers: { + // NOTE: This is currently done because + // we have a lock on the master branch + // and github requires an approval from + // *another* account that is not the creator + // of the pull request. + // This is a temporary workaround until we + // write a migration script to remove the lock on master. 
+ // TODO!: Remove this + Authorization: `token ${E2E_TEST_GH_TOKEN}`, + }, + } + ) + +export const getComments = async ( + siteName: string, + pullRequestNumber: number +) => { + const rawComments = await axiosInstance + .get(`${siteName}/issues/${pullRequestNumber}/comments`) + .then(({ data }) => data) + return _.compact( + rawComments.map((rawComment) => { + const commentData = fromGithubCommitMessage(rawComment.body) + if (_.isEmpty(commentData)) return null // Will be filtered out by _.compact + const { userId, message } = commentData + if (!userId || !message) return null // Will be filtered out by _.compact + return { + userId, + message, + createdAt: rawComment.created_at, + } + }) + ) +} + +export const createComment = async ( + siteName: string, + pullRequestNumber: number, + userId: string, + message: string +) => { + const stringifiedMessage = JSON.stringify({ + userId, + message, + }) + return axiosInstance.post( + `${siteName}/issues/${pullRequestNumber}/comments`, + { body: stringifiedMessage } + ) +} + +export const getBlob = async ( + repo: string, + path: string, + ref: string +): Promise => + axiosInstance + .get(`${repo}/contents/${path}?ref=${ref}`, { + headers: { + Accept: "application/vnd.github.raw", + }, + }) + .then(({ data }) => data) diff --git a/src/services/directoryServices/BaseDirectoryService.js b/src/services/directoryServices/BaseDirectoryService.js index 1f4daa507..35a63a81a 100644 --- a/src/services/directoryServices/BaseDirectoryService.js +++ b/src/services/directoryServices/BaseDirectoryService.js @@ -8,8 +8,8 @@ class BaseDirectoryService { this.gitHubService = gitHubService } - async list(reqDetails, { directoryName }) { - const directoryData = await this.gitHubService.readDirectory(reqDetails, { + async list(sessionData, { directoryName }) { + const directoryData = await this.gitHubService.readDirectory(sessionData, { directoryName, }) @@ -27,10 +27,18 @@ class BaseDirectoryService { return _.compact(filesOrDirs) } - async rename(reqDetails, { oldDirectoryName, newDirectoryName, message }) { - const gitTree = await this.gitHubService.getTree(reqDetails, { - isRecursive: true, - }) + async rename( + sessionData, + githubSessionData, + { oldDirectoryName, newDirectoryName, message } + ) { + const gitTree = await this.gitHubService.getTree( + sessionData, + githubSessionData, + { + isRecursive: true, + } + ) const newGitTree = [] @@ -55,19 +63,27 @@ class BaseDirectoryService { } }) - const newCommitSha = await this.gitHubService.updateTree(reqDetails, { - gitTree: newGitTree, - message, - }) - await this.gitHubService.updateRepoState(reqDetails, { + const newCommitSha = await this.gitHubService.updateTree( + sessionData, + githubSessionData, + { + gitTree: newGitTree, + message, + } + ) + await this.gitHubService.updateRepoState(sessionData, { commitSha: newCommitSha, }) } - async delete(reqDetails, { directoryName, message }) { - const gitTree = await this.gitHubService.getTree(reqDetails, { - isRecursive: true, - }) + async delete(sessionData, githubSessionData, { directoryName, message }) { + const gitTree = await this.gitHubService.getTree( + sessionData, + githubSessionData, + { + isRecursive: true, + } + ) // Retrieve removed items and set their sha to null const newGitTree = gitTree @@ -80,23 +96,32 @@ class BaseDirectoryService { sha: null, })) - const newCommitSha = await this.gitHubService.updateTree(reqDetails, { - gitTree: newGitTree, - message, - }) - await this.gitHubService.updateRepoState(reqDetails, { + const newCommitSha = await 
this.gitHubService.updateTree( + sessionData, + githubSessionData, + { + gitTree: newGitTree, + message, + } + ) + await this.gitHubService.updateRepoState(sessionData, { commitSha: newCommitSha, }) } // Move files which do not require modification of content async moveFiles( - reqDetails, + sessionData, + githubSessionData, { oldDirectoryName, newDirectoryName, targetFiles, message } ) { - const gitTree = await this.gitHubService.getTree(reqDetails, { - isRecursive: true, - }) + const gitTree = await this.gitHubService.getTree( + sessionData, + githubSessionData, + { + isRecursive: true, + } + ) const newGitTree = [] gitTree.forEach((item) => { if ( @@ -107,7 +132,6 @@ class BaseDirectoryService { .split(`${newDirectoryName}/`) .slice(1) .join(`${newDirectoryName}/`) - console.log(fileName) if (targetFiles.includes(fileName)) { // Conflicting file throw new ConflictError("File already exists in target directory") @@ -136,11 +160,15 @@ class BaseDirectoryService { } }) - const newCommitSha = await this.gitHubService.updateTree(reqDetails, { - gitTree: newGitTree, - message, - }) - await this.gitHubService.updateRepoState(reqDetails, { + const newCommitSha = await this.gitHubService.updateTree( + sessionData, + githubSessionData, + { + gitTree: newGitTree, + message, + } + ) + await this.gitHubService.updateRepoState(sessionData, { commitSha: newCommitSha, }) } diff --git a/src/services/directoryServices/CollectionDirectoryService.js b/src/services/directoryServices/CollectionDirectoryService.js index 0f5653a13..5ae6bc003 100644 --- a/src/services/directoryServices/CollectionDirectoryService.js +++ b/src/services/directoryServices/CollectionDirectoryService.js @@ -85,8 +85,8 @@ class CollectionDirectoryService { return fileOrder } - async listAllCollections(reqDetails) { - const filesOrDirs = await this.baseDirectoryService.list(reqDetails, { + async listAllCollections(sessionData) { + const filesOrDirs = await this.baseDirectoryService.list(sessionData, { directoryName: "", }) return filesOrDirs.reduce((acc, curr) => { @@ -103,15 +103,15 @@ class CollectionDirectoryService { }, []) } - async listFiles(reqDetails, { collectionName }) { - const files = await this.collectionYmlService.listContents(reqDetails, { + async listFiles(sessionData, { collectionName }) { + const files = await this.collectionYmlService.listContents(sessionData, { collectionName, }) return this.convertYmlToObjOrder(files) } - async createDirectory(reqDetails, { collectionName, objArray }) { + async createDirectory(sessionData, { collectionName, objArray }) { if (ISOMER_TEMPLATE_PROTECTED_DIRS.includes(collectionName)) throw new ConflictError(protectedFolderConflictErrorMsg(collectionName)) if (/[^a-zA-Z0-9- ]/g.test(collectionName)) { @@ -121,7 +121,7 @@ class CollectionDirectoryService { ) } const slugifiedCollectionName = slugifyCollectionName(collectionName) - await this.collectionYmlService.create(reqDetails, { + await this.collectionYmlService.create(sessionData, { collectionName: slugifiedCollectionName, }) if (objArray) { @@ -129,7 +129,7 @@ class CollectionDirectoryService { // We can't perform these operations concurrently because of conflict issues /* eslint-disable no-await-in-loop, no-restricted-syntax */ for (const fileName of orderArray) { - await this.moverService.movePage(reqDetails, { + await this.moverService.movePage(sessionData, { fileName, newFileCollection: slugifiedCollectionName, }) @@ -141,7 +141,11 @@ class CollectionDirectoryService { } } - async renameDirectory(reqDetails, { collectionName, 
newDirectoryName }) { + async renameDirectory( + sessionData, + githubSessionData, + { collectionName, newDirectoryName } + ) { if (/[^a-zA-Z0-9- ]/g.test(newDirectoryName)) { // Contains non-allowed characters throw new BadRequestError( @@ -151,36 +155,36 @@ class CollectionDirectoryService { if (ISOMER_TEMPLATE_PROTECTED_DIRS.includes(newDirectoryName)) throw new ConflictError(protectedFolderConflictErrorMsg(newDirectoryName)) const slugifiedNewCollectionName = slugifyCollectionName(newDirectoryName) - await this.baseDirectoryService.rename(reqDetails, { + await this.baseDirectoryService.rename(sessionData, githubSessionData, { oldDirectoryName: `_${collectionName}`, newDirectoryName: `_${slugifiedNewCollectionName}`, message: `Renaming collection ${collectionName} to ${slugifiedNewCollectionName}`, }) - await this.collectionYmlService.renameCollectionInOrder(reqDetails, { + await this.collectionYmlService.renameCollectionInOrder(sessionData, { oldCollectionName: collectionName, newCollectionName: slugifiedNewCollectionName, }) - await this.navYmlService.renameCollectionInNav(reqDetails, { + await this.navYmlService.renameCollectionInNav(sessionData, { oldCollectionName: collectionName, newCollectionName: slugifiedNewCollectionName, }) } - async deleteDirectory(reqDetails, { collectionName }) { + async deleteDirectory(sessionData, githubSessionData, { collectionName }) { if (ISOMER_TEMPLATE_PROTECTED_DIRS.includes(collectionName)) throw new ConflictError(protectedFolderConflictErrorMsg(collectionName)) - await this.baseDirectoryService.delete(reqDetails, { + await this.baseDirectoryService.delete(sessionData, githubSessionData, { directoryName: `_${collectionName}`, message: `Deleting collection ${collectionName}`, }) - await this.navYmlService.deleteCollectionInNav(reqDetails, { + await this.navYmlService.deleteCollectionInNav(sessionData, { collectionName, }) } - async reorderDirectory(reqDetails, { collectionName, objArray }) { + async reorderDirectory(sessionData, { collectionName, objArray }) { const fileOrder = this.convertObjToYmlOrder(objArray) - await this.collectionYmlService.updateOrder(reqDetails, { + await this.collectionYmlService.updateOrder(sessionData, { collectionName, newOrder: fileOrder, }) @@ -188,14 +192,14 @@ class CollectionDirectoryService { } async movePages( - reqDetails, + sessionData, { collectionName, targetCollectionName, targetSubcollectionName, objArray } ) { // We can't perform these operations concurrently because of conflict issues /* eslint-disable no-await-in-loop, no-restricted-syntax */ for (const file of objArray) { const fileName = file.name - await this.moverService.movePage(reqDetails, { + await this.moverService.movePage(sessionData, { fileName, oldFileCollection: collectionName, newFileCollection: targetCollectionName, diff --git a/src/services/directoryServices/MediaDirectoryService.js b/src/services/directoryServices/MediaDirectoryService.js index afdcdc96d..4cca8f777 100644 --- a/src/services/directoryServices/MediaDirectoryService.js +++ b/src/services/directoryServices/MediaDirectoryService.js @@ -1,6 +1,8 @@ +const { config } = require("@config/config") + const { BadRequestError } = require("@errors/BadRequestError") -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") const PLACEHOLDER_FILE_NAME = ".keep" @@ -16,10 +18,10 @@ class MediaDirectoryService { * Lists files in directory. 
Returns empty array if directory does not exist * - useful for base media directories which do not have placeholder files */ - async listWithDefault(reqDetails, { directoryName }) { + async listWithDefault(sessionData, { directoryName }) { let files = [] try { - const retrievedFiles = await this.baseDirectoryService.list(reqDetails, { + const retrievedFiles = await this.baseDirectoryService.list(sessionData, { directoryName, }) files = retrievedFiles @@ -30,16 +32,15 @@ class MediaDirectoryService { return files } - async listFiles(reqDetails, { directoryName }) { - // TODO: file preview handling - const { siteName } = reqDetails + async listFiles(sessionData, { directoryName }) { + const { siteName } = sessionData if (!isMediaPathValid({ path: directoryName })) throw new BadRequestError("Invalid media folder name") const mediaType = directoryName.split("/")[0] const { private: isPrivate } = await this.gitHubService.getRepoInfo( - reqDetails + sessionData ) - const files = await this.listWithDefault(reqDetails, { directoryName }) + const files = await this.listWithDefault(sessionData, { directoryName }) const resp = [] for (const curr of files) { @@ -64,7 +65,7 @@ class MediaDirectoryService { // Generate blob url const imageExt = curr.name.slice(curr.name.lastIndexOf(".") + 1) const contentType = `image/${imageExt === "svg" ? "svg+xml" : imageExt}` - const { content } = await this.gitHubService.readMedia(reqDetails, { + const { content } = await this.gitHubService.readMedia(sessionData, { fileSha: curr.sha, }) const blobURL = `data:${contentType};base64,${content}` @@ -75,7 +76,11 @@ class MediaDirectoryService { return resp } - async createMediaDirectory(reqDetails, { directoryName, objArray }) { + async createMediaDirectory( + sessionData, + githubSessionData, + { directoryName, objArray } + ) { if (!isMediaPathValid({ path: directoryName })) throw new BadRequestError( "Special characters not allowed in media folder name" @@ -90,16 +95,20 @@ class MediaDirectoryService { const pathTokens = directoryName.split("/") const oldDirectoryName = pathTokens.slice(0, -1).join("/") const targetFiles = objArray.map((file) => file.name) - await this.baseDirectoryService.moveFiles(reqDetails, { - oldDirectoryName, - newDirectoryName: directoryName, - targetFiles, - message: `Moving media files from ${oldDirectoryName} to ${directoryName}`, - }) + await this.baseDirectoryService.moveFiles( + sessionData, + githubSessionData, + { + oldDirectoryName, + newDirectoryName: directoryName, + targetFiles, + message: `Moving media files from ${oldDirectoryName} to ${directoryName}`, + } + ) } // We do this step later because the git tree operation overrides it otherwise - await this.gitHubService.create(reqDetails, { + await this.gitHubService.create(sessionData, { content: "", fileName: PLACEHOLDER_FILE_NAME, directoryName, @@ -110,29 +119,38 @@ class MediaDirectoryService { } } - async renameMediaDirectory(reqDetails, { directoryName, newDirectoryName }) { + async renameMediaDirectory( + sessionData, + githubSessionData, + { directoryName, newDirectoryName } + ) { if (!isMediaPathValid({ path: newDirectoryName })) throw new BadRequestError( "Special characters not allowed in media folder name" ) - await this.baseDirectoryService.rename(reqDetails, { + await this.baseDirectoryService.rename(sessionData, githubSessionData, { oldDirectoryName: directoryName, newDirectoryName, message: `Renaming media folder ${directoryName} to ${newDirectoryName}`, }) } - async deleteMediaDirectory(reqDetails, { directoryName 
}) { + async deleteMediaDirectory( + sessionData, + githubSessionData, + { directoryName } + ) { if (!isMediaPathValid({ path: directoryName })) throw new BadRequestError("Invalid media folder name") - await this.baseDirectoryService.delete(reqDetails, { + await this.baseDirectoryService.delete(sessionData, githubSessionData, { directoryName, message: `Deleting media folder ${directoryName}`, }) } async moveMediaFiles( - reqDetails, + sessionData, + githubSessionData, { directoryName, targetDirectoryName, objArray } ) { if ( @@ -144,7 +162,7 @@ class MediaDirectoryService { ) const targetFiles = objArray.map((item) => item.name) - await this.baseDirectoryService.moveFiles(reqDetails, { + await this.baseDirectoryService.moveFiles(sessionData, githubSessionData, { oldDirectoryName: directoryName, newDirectoryName: targetDirectoryName, targetFiles, diff --git a/src/services/directoryServices/ResourceDirectoryService.js b/src/services/directoryServices/ResourceDirectoryService.js index cddd699b0..ee218a55e 100644 --- a/src/services/directoryServices/ResourceDirectoryService.js +++ b/src/services/directoryServices/ResourceDirectoryService.js @@ -19,9 +19,9 @@ class ResourceDirectoryService { return `${resourceRoomName}/${resourceCategoryName}` } - async listFiles(reqDetails, { resourceRoomName, resourceCategoryName }) { + async listFiles(sessionData, { resourceRoomName, resourceCategoryName }) { const resourceCategories = await this.baseDirectoryService.list( - reqDetails, + sessionData, { directoryName: `${resourceRoomName}`, } @@ -34,7 +34,7 @@ class ResourceDirectoryService { throw new NotFoundError("Resource category does not exist") let files = [] try { - files = await this.baseDirectoryService.list(reqDetails, { + files = await this.baseDirectoryService.list(sessionData, { directoryName: `${this.getResourceDirectoryPath({ resourceRoomName, resourceCategoryName, @@ -76,7 +76,7 @@ class ResourceDirectoryService { } async createResourceDirectory( - reqDetails, + sessionData, { resourceRoomName, resourceCategoryName } ) { if (/[^a-zA-Z0-9- ]/g.test(resourceCategoryName)) { @@ -93,7 +93,7 @@ class ResourceDirectoryService { title: resourceCategoryName, } const newContent = convertDataToMarkdown(frontMatter, "") - await this.gitHubService.create(reqDetails, { + await this.gitHubService.create(sessionData, { content: newContent, fileName: INDEX_FILE_NAME, directoryName: this.getResourceDirectoryPath({ @@ -107,7 +107,8 @@ class ResourceDirectoryService { } async renameResourceDirectory( - reqDetails, + sessionData, + githubSessionData, { resourceRoomName, resourceCategoryName, newDirectoryName } ) { if (/[^a-zA-Z0-9- ]/g.test(newDirectoryName)) { @@ -124,7 +125,7 @@ class ResourceDirectoryService { newDirectoryName ) const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: INDEX_FILE_NAME, directoryName: oldDirectoryName, @@ -138,12 +139,12 @@ class ResourceDirectoryService { resourceCategoryName: slugifiedNewResourceCategoryName, }) - await this.baseDirectoryService.rename(reqDetails, { + await this.baseDirectoryService.rename(sessionData, githubSessionData, { oldDirectoryName, newDirectoryName: newDirectoryPath, message: `Renaming resource category ${resourceCategoryName} to ${slugifiedNewResourceCategoryName}`, }) - await this.gitHubService.update(reqDetails, { + await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName: INDEX_FILE_NAME, @@ -152,10 +153,11 @@ class ResourceDirectoryService { } async 
deleteResourceDirectory( - reqDetails, + sessionData, + githubSessionData, { resourceRoomName, resourceCategoryName } ) { - await this.baseDirectoryService.delete(reqDetails, { + await this.baseDirectoryService.delete(sessionData, githubSessionData, { directoryName: this.getResourceDirectoryPath({ resourceRoomName, resourceCategoryName, @@ -165,7 +167,8 @@ class ResourceDirectoryService { } async moveResourcePages( - reqDetails, + sessionData, + githubSessionData, { resourceRoomName, resourceCategoryName, targetResourceCategory, objArray } ) { const targetFiles = objArray.map((item) => item.name) @@ -177,7 +180,7 @@ class ResourceDirectoryService { resourceRoomName, resourceCategoryName: targetResourceCategory, })}/_posts` - await this.baseDirectoryService.moveFiles(reqDetails, { + await this.baseDirectoryService.moveFiles(sessionData, githubSessionData, { oldDirectoryName, newDirectoryName, targetFiles, diff --git a/src/services/directoryServices/ResourceRoomDirectoryService.js b/src/services/directoryServices/ResourceRoomDirectoryService.js index b6b79278c..009f0c04d 100644 --- a/src/services/directoryServices/ResourceRoomDirectoryService.js +++ b/src/services/directoryServices/ResourceRoomDirectoryService.js @@ -16,8 +16,8 @@ class ResourceRoomDirectoryService { this.gitHubService = gitHubService } - async listAllResourceCategories(reqDetails, { resourceRoomName }) { - const filesOrDirs = await this.baseDirectoryService.list(reqDetails, { + async listAllResourceCategories(sessionData, { resourceRoomName }) { + const filesOrDirs = await this.baseDirectoryService.list(sessionData, { directoryName: `${resourceRoomName}`, }) return filesOrDirs.reduce((acc, curr) => { @@ -30,8 +30,8 @@ class ResourceRoomDirectoryService { }, []) } - async getResourceRoomDirectoryName(reqDetails) { - const config = await this.configYmlService.read(reqDetails) + async getResourceRoomDirectoryName(sessionData) { + const config = await this.configYmlService.read(sessionData) return { resourceRoomName: config.content.resources_name ? 
config.content.resources_name @@ -39,7 +39,7 @@ class ResourceRoomDirectoryService { } } - async createResourceRoomDirectory(reqDetails, { resourceRoomName }) { + async createResourceRoomDirectory(sessionData, { resourceRoomName }) { if (/[^a-zA-Z0-9- ]/g.test(resourceRoomName)) { // Contains non-allowed characters throw new BadRequestError( @@ -48,13 +48,13 @@ class ResourceRoomDirectoryService { } const slugifiedResourceRoomName = slugifyCollectionName(resourceRoomName) const { content: configContent, sha } = await this.configYmlService.read( - reqDetails + sessionData ) // If resource room already exists, throw error if ("resources_name" in configContent) throw new ConflictError("Resource room already exists") configContent.resources_name = slugifiedResourceRoomName - await this.configYmlService.update(reqDetails, { + await this.configYmlService.update(sessionData, { fileContent: configContent, sha, }) @@ -63,7 +63,7 @@ class ResourceRoomDirectoryService { title: resourceRoomName, } const newContent = convertDataToMarkdown(frontMatter, "") - await this.gitHubService.create(reqDetails, { + await this.gitHubService.create(sessionData, { content: newContent, fileName: INDEX_FILE_NAME, directoryName: slugifiedResourceRoomName, @@ -74,7 +74,8 @@ class ResourceRoomDirectoryService { } async renameResourceRoomDirectory( - reqDetails, + sessionData, + githubSessionData, { resourceRoomName, newDirectoryName } ) { if (/[^a-zA-Z0-9- ]/g.test(newDirectoryName)) { @@ -86,7 +87,7 @@ class ResourceRoomDirectoryService { const slugifiedNewResourceRoomName = slugifyCollectionName(newDirectoryName) const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: INDEX_FILE_NAME, directoryName: resourceRoomName, @@ -96,12 +97,12 @@ class ResourceRoomDirectoryService { frontMatter.title = newDirectoryName const newContent = convertDataToMarkdown(frontMatter, pageContent) - await this.baseDirectoryService.rename(reqDetails, { + await this.baseDirectoryService.rename(sessionData, githubSessionData, { oldDirectoryName: resourceRoomName, newDirectoryName: slugifiedNewResourceRoomName, message: `Renaming resource room from ${resourceRoomName} to ${slugifiedNewResourceRoomName}`, }) - await this.gitHubService.update(reqDetails, { + await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName: INDEX_FILE_NAME, @@ -111,9 +112,9 @@ class ResourceRoomDirectoryService { const { content: configContent, sha: configSha, - } = await this.configYmlService.read(reqDetails) + } = await this.configYmlService.read(sessionData) configContent.resources_name = slugifiedNewResourceRoomName - await this.configYmlService.update(reqDetails, { + await this.configYmlService.update(sessionData, { fileContent: configContent, sha: configSha, }) @@ -122,17 +123,21 @@ class ResourceRoomDirectoryService { } } - async deleteResourceRoomDirectory(reqDetails, { resourceRoomName }) { - await this.baseDirectoryService.delete(reqDetails, { + async deleteResourceRoomDirectory( + sessionData, + githubSessionData, + { resourceRoomName } + ) { + await this.baseDirectoryService.delete(sessionData, githubSessionData, { directoryName: resourceRoomName, message: `Deleting resource room ${resourceRoomName}`, }) const { content: configContent, sha } = await this.configYmlService.read( - reqDetails + sessionData ) delete configContent.resources_name - await this.configYmlService.update(reqDetails, { + await this.configYmlService.update(sessionData, { fileContent: configContent, sha, }) 
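Editor's note: before the next file's diff, a minimal sketch of the calling convention the hunks above converge on. Request-scoped user context travels as sessionData (exposing accessToken, siteName, isomerUserId), git tree state travels separately as githubSessionData (exposing getGithubState()), and file-level commit messages become JSON envelopes carrying the acting user's id. The class bodies, the buildFileCommitMessage helper, and renameCollectionExample below are illustrative assumptions that mirror only the fields these diffs read; they are not the actual implementations.

// Illustrative shapes only; they mirror the fields the hunks above read,
// not the real class implementations in the codebase.
class UserWithSiteSessionData {
  constructor({ accessToken, siteName, isomerUserId }) {
    this.accessToken = accessToken
    this.siteName = siteName
    this.isomerUserId = isomerUserId
  }
}

class GithubSessionData {
  constructor({ treeSha, currentCommitSha }) {
    this.treeSha = treeSha
    this.currentCommitSha = currentCommitSha
  }

  getGithubState() {
    return { treeSha: this.treeSha, currentCommitSha: this.currentCommitSha }
  }
}

// File-level writes (create/update/delete) stringify a JSON envelope so the
// acting Isomer user can be recovered from the commit later,
// e.g. buildFileCommitMessage("Update", "index.md", "42").
const buildFileCommitMessage = (action, fileName, userId) =>
  JSON.stringify({ message: `${action} file: ${fileName}`, fileName, userId })

// Tree-level operations receive both session objects; the plain-text message
// is wrapped into { message, userId } inside GitHubService.updateTree.
async function renameCollectionExample(
  baseDirectoryService, // hypothetical wiring of the service shown above
  sessionData,
  githubSessionData
) {
  await baseDirectoryService.rename(sessionData, githubSessionData, {
    oldDirectoryName: "_old-collection",
    newDirectoryName: "_new-collection",
    message: "Renaming collection old-collection to new-collection",
  })
}

module.exports = {
  UserWithSiteSessionData,
  GithubSessionData,
  buildFileCommitMessage,
  renameCollectionExample,
}

The split keeps per-request user identity (sessionData) separate from the mutable git tree checkpoint (githubSessionData), so tree-sha bookkeeping no longer leaks into the user-facing request object.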
diff --git a/src/services/directoryServices/SubcollectionDirectoryService.js b/src/services/directoryServices/SubcollectionDirectoryService.js index 30a5a6002..93dceb812 100644 --- a/src/services/directoryServices/SubcollectionDirectoryService.js +++ b/src/services/directoryServices/SubcollectionDirectoryService.js @@ -123,9 +123,13 @@ class SubcollectionDirectoryService { }) } - async deleteDirectory(reqDetails, { collectionName, subcollectionName }) { + async deleteDirectory( + reqDetails, + githubSessionData, + { collectionName, subcollectionName } + ) { const dir = `_${collectionName}/${subcollectionName}` - await this.baseDirectoryService.delete(reqDetails, { + await this.baseDirectoryService.delete(reqDetails, githubSessionData, { directoryName: dir, message: `Deleting subcollection ${collectionName}/${subcollectionName}`, }) diff --git a/src/services/directoryServices/UnlinkedPagesDirectoryService.js b/src/services/directoryServices/UnlinkedPagesDirectoryService.js index 19f849fff..a46520b2b 100644 --- a/src/services/directoryServices/UnlinkedPagesDirectoryService.js +++ b/src/services/directoryServices/UnlinkedPagesDirectoryService.js @@ -6,8 +6,8 @@ class UnlinkedPagesDirectoryService { this.moverService = moverService } - async listAllUnlinkedPages(reqDetails) { - const filesOrDirs = await this.baseDirectoryService.list(reqDetails, { + async listAllUnlinkedPages(sessionData) { + const filesOrDirs = await this.baseDirectoryService.list(sessionData, { directoryName: UNLINKED_PAGE_DIRECTORY_NAME, }) return filesOrDirs.reduce((acc, curr) => { @@ -21,14 +21,14 @@ class UnlinkedPagesDirectoryService { } async movePages( - reqDetails, + sessionData, { targetCollectionName, targetSubcollectionName, objArray } ) { // We can't perform these operations concurrently because of conflict issues /* eslint-disable no-await-in-loop, no-restricted-syntax */ for (const file of objArray) { const fileName = file.name - await this.moverService.movePage(reqDetails, { + await this.moverService.movePage(sessionData, { fileName, newFileCollection: targetCollectionName, newFileSubcollection: targetSubcollectionName, diff --git a/src/services/directoryServices/__tests__/BaseDirectoryService.spec.js b/src/services/directoryServices/__tests__/BaseDirectoryService.spec.js index 6db196922..a539aae53 100644 --- a/src/services/directoryServices/__tests__/BaseDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/BaseDirectoryService.spec.js @@ -10,6 +10,7 @@ describe("Base Directory Service", () => { const message = "message" const currentCommitSha = "98765" const treeSha = "00000" + const mockGithubSessionData = "mockData" const mockedTree = [ { @@ -145,33 +146,45 @@ describe("Base Directory Service", () => { ]) it("Renaming a directory to one with an existing name throws an error", async () => { await expect( - service.rename(reqDetails, { + service.rename(reqDetails, mockGithubSessionData, { oldDirectoryName: directoryName, newDirectoryName: renamedDir, message, }) ).rejects.toThrowError(ConflictError) - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) }) mockGithubService.getTree.mockResolvedValueOnce(mockedTree) mockGithubService.updateTree.mockResolvedValueOnce(sha) it("Renaming directories works correctly", async () => { await expect( - service.rename(reqDetails, { + service.rename(reqDetails, mockGithubSessionData, 
{ oldDirectoryName: directoryName, newDirectoryName: renamedDir, message, }) ).resolves.not.toThrow() - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) - expect(mockGithubService.updateTree).toHaveBeenCalledWith(reqDetails, { - gitTree: mockedRenamedTree, - message, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) + expect(mockGithubService.updateTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + gitTree: mockedRenamedTree, + message, + } + ) expect(mockGithubService.updateRepoState).toHaveBeenCalledWith( reqDetails, { @@ -208,18 +221,26 @@ describe("Base Directory Service", () => { mockGithubService.updateTree.mockResolvedValueOnce(sha) it("Deleting directories works correctly", async () => { await expect( - service.delete(reqDetails, { + service.delete(reqDetails, mockGithubSessionData, { directoryName, message, }) ).resolves.not.toThrow() - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) - expect(mockGithubService.updateTree).toHaveBeenCalledWith(reqDetails, { - gitTree: mockedDeletedTree, - message, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) + expect(mockGithubService.updateTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + gitTree: mockedDeletedTree, + message, + } + ) expect(mockGithubService.updateRepoState).toHaveBeenCalledWith( reqDetails, { @@ -260,35 +281,47 @@ describe("Base Directory Service", () => { ]) it("Moving files to a directory which has a file of the same name throws an error", async () => { await expect( - service.moveFiles(reqDetails, { + service.moveFiles(reqDetails, mockGithubSessionData, { oldDirectoryName: `${directoryName}/${subcollectionName}`, newDirectoryName: targetDir, targetFiles: ["file.md", "file2.md"], message, }) ).rejects.toThrowError(ConflictError) - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) }) mockGithubService.getTree.mockResolvedValueOnce(mockedTree) mockGithubService.updateTree.mockResolvedValueOnce(sha) it("Moving files in directories works correctly", async () => { await expect( - service.moveFiles(reqDetails, { + service.moveFiles(reqDetails, mockGithubSessionData, { oldDirectoryName: `${directoryName}/${subcollectionName}`, newDirectoryName: targetDir, targetFiles: ["file.md", "file2.md"], message, }) ).resolves.not.toThrow() - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) - expect(mockGithubService.updateTree).toHaveBeenCalledWith(reqDetails, { - gitTree: mockedMovedTree, - message, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) + expect(mockGithubService.updateTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + gitTree: mockedMovedTree, + message, + } + ) expect(mockGithubService.updateRepoState).toHaveBeenCalledWith( reqDetails, { diff --git a/src/services/directoryServices/__tests__/CollectionDirectoryService.spec.js b/src/services/directoryServices/__tests__/CollectionDirectoryService.spec.js index 019624449..e59ae1a62 100644 --- 
a/src/services/directoryServices/__tests__/CollectionDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/CollectionDirectoryService.spec.js @@ -5,6 +5,7 @@ describe("Collection Directory Service", () => { const siteName = "test-site" const accessToken = "test-token" const collectionName = "collection" + const mockGithubSessionData = "mockData" const objArray = [ { @@ -252,7 +253,7 @@ describe("Collection Directory Service", () => { const newDirectoryName = "new-dir" it("rejects renaming to a collection with the same name as protected folders", async () => { await expect( - service.renameDirectory(reqDetails, { + service.renameDirectory(reqDetails, mockGithubSessionData, { collectionName, newDirectoryName: "files", }) @@ -261,7 +262,7 @@ describe("Collection Directory Service", () => { it("rejects collections with special characters", async () => { await expect( - service.renameDirectory(reqDetails, { + service.renameDirectory(reqDetails, mockGithubSessionData, { collectionName, newDirectoryName: "dir/dir", }) @@ -270,16 +271,20 @@ describe("Collection Directory Service", () => { it("Renaming a collection works correctly", async () => { await expect( - service.renameDirectory(reqDetails, { + service.renameDirectory(reqDetails, mockGithubSessionData, { collectionName, newDirectoryName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: `_${collectionName}`, - newDirectoryName: `_${newDirectoryName}`, - message: `Renaming collection ${collectionName} to ${newDirectoryName}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: `_${collectionName}`, + newDirectoryName: `_${newDirectoryName}`, + message: `Renaming collection ${collectionName} to ${newDirectoryName}`, + } + ) expect( mockCollectionYmlService.renameCollectionInOrder ).toHaveBeenCalledWith(reqDetails, { @@ -298,16 +303,20 @@ describe("Collection Directory Service", () => { const originalCollectionName = "Test Collection" const slugifiedCollectionName = "test-collection" await expect( - service.renameDirectory(reqDetails, { + service.renameDirectory(reqDetails, mockGithubSessionData, { collectionName, newDirectoryName: originalCollectionName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: `_${collectionName}`, - newDirectoryName: `_${slugifiedCollectionName}`, - message: `Renaming collection ${collectionName} to ${slugifiedCollectionName}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: `_${collectionName}`, + newDirectoryName: `_${slugifiedCollectionName}`, + message: `Renaming collection ${collectionName} to ${slugifiedCollectionName}`, + } + ) expect( mockCollectionYmlService.renameCollectionInOrder ).toHaveBeenCalledWith(reqDetails, { @@ -327,7 +336,7 @@ describe("Collection Directory Service", () => { describe("DeleteDirectory", () => { it("rejects deleting a collection with the same name as protected folders", async () => { await expect( - service.deleteDirectory(reqDetails, { + service.deleteDirectory(reqDetails, mockGithubSessionData, { collectionName: "data", }) ).rejects.toThrowError(ConflictError) @@ -335,14 +344,18 @@ describe("Collection Directory Service", () => { it("Deleting a directory works correctly", async () => { await expect( - service.deleteDirectory(reqDetails, { + 
service.deleteDirectory(reqDetails, mockGithubSessionData, { collectionName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith(reqDetails, { - directoryName: `_${collectionName}`, - message: `Deleting collection ${collectionName}`, - }) + expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + directoryName: `_${collectionName}`, + message: `Deleting collection ${collectionName}`, + } + ) expect(mockNavYmlService.deleteCollectionInNav).toHaveBeenCalledWith( reqDetails, { diff --git a/src/services/directoryServices/__tests__/MediaDirectoryService.spec.js b/src/services/directoryServices/__tests__/MediaDirectoryService.spec.js index 62781500c..afc762533 100644 --- a/src/services/directoryServices/__tests__/MediaDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/MediaDirectoryService.spec.js @@ -1,6 +1,8 @@ +const { config } = require("@config/config") + const { BadRequestError } = require("@errors/BadRequestError") -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") const PLACEHOLDER_FILE_NAME = ".keep" @@ -11,6 +13,7 @@ describe("Media Directory Service", () => { const imageDirectoryName = `images/${imageSubdirectory}` const fileSubdirectory = "fileDir" const fileDirectoryName = `files/${fileSubdirectory}` + const mockGithubSessionData = "mockData" const objArray = [ { @@ -205,7 +208,7 @@ describe("Media Directory Service", () => { describe("CreateMediaDirectory", () => { it("rejects directories with special characters", async () => { await expect( - service.createMediaDirectory(reqDetails, { + service.createMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: "dir/dir", objArray: undefined, }) @@ -214,7 +217,7 @@ describe("Media Directory Service", () => { it("Creating a directory with no specified files works correctly", async () => { await expect( - service.createMediaDirectory(reqDetails, { + service.createMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: imageDirectoryName, objArray: undefined, }) @@ -241,7 +244,7 @@ describe("Media Directory Service", () => { }, ] await expect( - service.createMediaDirectory(reqDetails, { + service.createMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: newDirectoryName, objArray, }) @@ -255,6 +258,7 @@ describe("Media Directory Service", () => { }) expect(mockBaseDirectoryService.moveFiles).toHaveBeenCalledWith( reqDetails, + mockGithubSessionData, { oldDirectoryName: fileDirectoryName, newDirectoryName, @@ -269,7 +273,7 @@ describe("Media Directory Service", () => { const newDirectoryName = "images/new dir" it("rejects names with special characters", async () => { await expect( - service.renameMediaDirectory(reqDetails, { + service.renameMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: imageDirectoryName, newDirectoryName: "dir/dir", }) @@ -278,30 +282,38 @@ describe("Media Directory Service", () => { it("Renaming a media directory works correctly", async () => { await expect( - service.renameMediaDirectory(reqDetails, { + service.renameMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: imageDirectoryName, newDirectoryName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: imageDirectoryName, - newDirectoryName, - message: `Renaming media folder ${imageDirectoryName} to ${newDirectoryName}`, - }) + 
expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: imageDirectoryName, + newDirectoryName, + message: `Renaming media folder ${imageDirectoryName} to ${newDirectoryName}`, + } + ) }) }) describe("DeleteMediaDirectory", () => { it("Deleting a directory works correctly", async () => { await expect( - service.deleteMediaDirectory(reqDetails, { + service.deleteMediaDirectory(reqDetails, mockGithubSessionData, { directoryName: imageDirectoryName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith(reqDetails, { - directoryName: imageDirectoryName, - message: `Deleting media folder ${imageDirectoryName}`, - }) + expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + directoryName: imageDirectoryName, + message: `Deleting media folder ${imageDirectoryName}`, + } + ) }) }) @@ -310,7 +322,7 @@ describe("Media Directory Service", () => { const targetFiles = objArray.map((item) => item.name) it("Moving media in a media directory to another media directory works correctly", async () => { await expect( - service.moveMediaFiles(reqDetails, { + service.moveMediaFiles(reqDetails, mockGithubSessionData, { directoryName: fileDirectoryName, targetDirectoryName, objArray, @@ -318,6 +330,7 @@ describe("Media Directory Service", () => { ).resolves.not.toThrowError() expect(mockBaseDirectoryService.moveFiles).toHaveBeenCalledWith( reqDetails, + mockGithubSessionData, { oldDirectoryName: fileDirectoryName, newDirectoryName: targetDirectoryName, diff --git a/src/services/directoryServices/__tests__/ResourceDirectoryService.spec.js b/src/services/directoryServices/__tests__/ResourceDirectoryService.spec.js index 4b716a697..0f5f058af 100644 --- a/src/services/directoryServices/__tests__/ResourceDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/ResourceDirectoryService.spec.js @@ -16,6 +16,7 @@ describe("Resource Directory Service", () => { title: resourceCategoryName, } const sha = "12345" + const mockGithubSessionData = "mockData" const objArray = [ { @@ -242,7 +243,7 @@ describe("Resource Directory Service", () => { const newDirectoryName = "new-dir" it("rejects resource categories with special characters", async () => { await expect( - service.renameResourceDirectory(reqDetails, { + service.renameResourceDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, resourceCategoryName, newDirectoryName: "dir/dir", @@ -255,7 +256,7 @@ describe("Resource Directory Service", () => { }) it("Renaming a resource category works correctly", async () => { await expect( - service.renameResourceDirectory(reqDetails, { + service.renameResourceDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, resourceCategoryName, newDirectoryName, @@ -274,11 +275,15 @@ describe("Resource Directory Service", () => { }, mockContent ) - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: directoryName, - newDirectoryName: `${resourceRoomName}/${newDirectoryName}`, - message: `Renaming resource category ${resourceCategoryName} to ${newDirectoryName}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: directoryName, + newDirectoryName: `${resourceRoomName}/${newDirectoryName}`, + message: `Renaming resource category ${resourceCategoryName} to ${newDirectoryName}`, + } + ) 
expect(mockGitHubService.update).toHaveBeenCalledWith(reqDetails, { fileContent: mockMarkdownContent, sha, @@ -299,7 +304,7 @@ describe("Resource Directory Service", () => { const slugifiedResourceCategory = "test-resource" await expect( - service.renameResourceDirectory(reqDetails, { + service.renameResourceDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, resourceCategoryName, newDirectoryName: originalResourceCategory, @@ -318,11 +323,15 @@ describe("Resource Directory Service", () => { }, mockContent ) - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: directoryName, - newDirectoryName: `${resourceRoomName}/${slugifiedResourceCategory}`, - message: `Renaming resource category ${resourceCategoryName} to ${slugifiedResourceCategory}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: directoryName, + newDirectoryName: `${resourceRoomName}/${slugifiedResourceCategory}`, + message: `Renaming resource category ${resourceCategoryName} to ${slugifiedResourceCategory}`, + } + ) expect(mockGitHubService.update).toHaveBeenCalledWith(reqDetails, { fileContent: mockMarkdownContent, sha, @@ -338,15 +347,19 @@ describe("Resource Directory Service", () => { describe("DeleteResourceDirectory", () => { it("Deleting a resource category works correctly", async () => { await expect( - service.deleteResourceDirectory(reqDetails, { + service.deleteResourceDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, resourceCategoryName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith(reqDetails, { - directoryName, - message: `Deleting resource category ${resourceCategoryName}`, - }) + expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + directoryName, + message: `Deleting resource category ${resourceCategoryName}`, + } + ) }) }) @@ -355,7 +368,7 @@ describe("Resource Directory Service", () => { const targetFiles = ["file.md", "file2.md"] it("Moving resource pages works correctly", async () => { await expect( - service.moveResourcePages(reqDetails, { + service.moveResourcePages(reqDetails, mockGithubSessionData, { resourceRoomName, resourceCategoryName, targetResourceCategory, @@ -364,6 +377,7 @@ describe("Resource Directory Service", () => { ).resolves.not.toThrowError() expect(mockBaseDirectoryService.moveFiles).toHaveBeenCalledWith( reqDetails, + mockGithubSessionData, { oldDirectoryName: `${directoryName}/_posts`, newDirectoryName: `${resourceRoomName}/${targetResourceCategory}/_posts`, diff --git a/src/services/directoryServices/__tests__/ResourceRoomDirectoryService.spec.js b/src/services/directoryServices/__tests__/ResourceRoomDirectoryService.spec.js index 4a5412bdb..3ae80b7fd 100644 --- a/src/services/directoryServices/__tests__/ResourceRoomDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/ResourceRoomDirectoryService.spec.js @@ -26,6 +26,7 @@ describe("Resource Room Directory Service", () => { resources_name: "resource", } const sha = "12345" + const mockGithubSessionData = "mockData" const reqDetails = { siteName, accessToken } @@ -226,7 +227,7 @@ describe("Resource Room Directory Service", () => { const configSha = "23456" it("rejects resource room names with special characters", async () => { await expect( - service.renameResourceRoomDirectory(reqDetails, { + service.renameResourceRoomDirectory(reqDetails, mockGithubSessionData, 
{ resourceRoomName, newDirectoryName: "dir/dir", }) @@ -243,7 +244,7 @@ describe("Resource Room Directory Service", () => { }) it("Renaming a resource room works correctly", async () => { await expect( - service.renameResourceRoomDirectory(reqDetails, { + service.renameResourceRoomDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, newDirectoryName, }) @@ -266,11 +267,15 @@ describe("Resource Room Directory Service", () => { fileName: INDEX_FILE_NAME, directoryName: newDirectoryName, }) - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: directoryName, - newDirectoryName, - message: `Renaming resource room from ${resourceRoomName} to ${newDirectoryName}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: directoryName, + newDirectoryName, + message: `Renaming resource room from ${resourceRoomName} to ${newDirectoryName}`, + } + ) expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) expect(mockConfigYmlService.update).toHaveBeenCalledWith(reqDetails, { fileContent: { @@ -292,7 +297,7 @@ describe("Resource Room Directory Service", () => { const originalResourceRoom = "Test Resource" const slugifiedResourceRoom = "test-resource" await expect( - service.renameResourceRoomDirectory(reqDetails, { + service.renameResourceRoomDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, newDirectoryName: originalResourceRoom, }) @@ -315,11 +320,15 @@ describe("Resource Room Directory Service", () => { fileName: INDEX_FILE_NAME, directoryName: slugifiedResourceRoom, }) - expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith(reqDetails, { - oldDirectoryName: directoryName, - newDirectoryName: slugifiedResourceRoom, - message: `Renaming resource room from ${resourceRoomName} to ${slugifiedResourceRoom}`, - }) + expect(mockBaseDirectoryService.rename).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + oldDirectoryName: directoryName, + newDirectoryName: slugifiedResourceRoom, + message: `Renaming resource room from ${resourceRoomName} to ${slugifiedResourceRoom}`, + } + ) expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) expect(mockConfigYmlService.update).toHaveBeenCalledWith(reqDetails, { fileContent: { @@ -338,14 +347,18 @@ describe("Resource Room Directory Service", () => { }) it("Deleting a resource room works correctly", async () => { await expect( - service.deleteResourceRoomDirectory(reqDetails, { + service.deleteResourceRoomDirectory(reqDetails, mockGithubSessionData, { resourceRoomName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith(reqDetails, { - directoryName, - message: `Deleting resource room ${resourceRoomName}`, - }) + expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + directoryName, + message: `Deleting resource room ${resourceRoomName}`, + } + ) expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) const newConfigContent = { ...mockConfigContent } delete newConfigContent.resources_name diff --git a/src/services/directoryServices/__tests__/SubcollectionDirectoryService.spec.js b/src/services/directoryServices/__tests__/SubcollectionDirectoryService.spec.js index 3848381d6..860eb7a49 100644 --- a/src/services/directoryServices/__tests__/SubcollectionDirectoryService.spec.js +++ b/src/services/directoryServices/__tests__/SubcollectionDirectoryService.spec.js @@ -7,6 +7,7 @@ 
describe("Subcollection Directory Service", () => { const accessToken = "test-token" const collectionName = "collection" const subcollectionName = "Subcollection name" + const mockGithubSessionData = "mockData" const objArray = [ { @@ -329,15 +330,19 @@ describe("Subcollection Directory Service", () => { describe("DeleteDirectory", () => { it("Deleting a directory works correctly", async () => { await expect( - service.deleteDirectory(reqDetails, { + service.deleteDirectory(reqDetails, mockGithubSessionData, { collectionName, subcollectionName, }) ).resolves.not.toThrowError() - expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith(reqDetails, { - directoryName: `_${collectionName}/${subcollectionName}`, - message: `Deleting subcollection ${collectionName}/${subcollectionName}`, - }) + expect(mockBaseDirectoryService.delete).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + directoryName: `_${collectionName}/${subcollectionName}`, + message: `Deleting subcollection ${collectionName}/${subcollectionName}`, + } + ) expect( mockCollectionYmlService.deleteSubfolderFromOrder ).toHaveBeenCalledWith(reqDetails, { diff --git a/src/services/fileServices/MdPageServices/CollectionPageService.js b/src/services/fileServices/MdPageServices/CollectionPageService.js index ee0a1ba29..a01475729 100644 --- a/src/services/fileServices/MdPageServices/CollectionPageService.js +++ b/src/services/fileServices/MdPageServices/CollectionPageService.js @@ -14,7 +14,7 @@ class CollectionPageService { } async create( - reqDetails, + sessionData, { fileName, collectionName, content, frontMatter, shouldIgnoreCheck } ) { if ( @@ -24,7 +24,7 @@ class CollectionPageService { throw new BadRequestError("Special characters not allowed in file name") const parsedCollectionName = `_${collectionName}` - await this.collectionYmlService.addItemToOrder(reqDetails, { + await this.collectionYmlService.addItemToOrder(sessionData, { collectionName, item: fileName, }) @@ -33,7 +33,7 @@ class CollectionPageService { delete frontMatter.third_nav_title const newContent = convertDataToMarkdown(frontMatter, content) - const { sha } = await this.gitHubService.create(reqDetails, { + const { sha } = await this.gitHubService.create(sessionData, { content: newContent, fileName, directoryName: parsedCollectionName, @@ -41,10 +41,10 @@ class CollectionPageService { return { fileName, content: { frontMatter, pageBody: content }, sha } } - async read(reqDetails, { fileName, collectionName }) { + async read(sessionData, { fileName, collectionName }) { const parsedCollectionName = `_${collectionName}` const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName, directoryName: parsedCollectionName, @@ -55,12 +55,12 @@ class CollectionPageService { } async update( - reqDetails, + sessionData, { fileName, collectionName, content, frontMatter, sha } ) { const parsedCollectionName = `_${collectionName}` const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName, @@ -74,15 +74,15 @@ class CollectionPageService { } } - async delete(reqDetails, { fileName, collectionName, sha }) { + async delete(sessionData, { fileName, collectionName, sha }) { const parsedCollectionName = `_${collectionName}` // Remove from collection.yml - await this.collectionYmlService.deleteItemFromOrder(reqDetails, { + await 
this.collectionYmlService.deleteItemFromOrder(sessionData, { collectionName, item: fileName, }) - return this.gitHubService.delete(reqDetails, { + return this.gitHubService.delete(sessionData, { sha, fileName, directoryName: parsedCollectionName, @@ -90,20 +90,20 @@ class CollectionPageService { } async rename( - reqDetails, + sessionData, { oldFileName, newFileName, collectionName, content, frontMatter, sha } ) { if (titleSpecialCharCheck({ title: newFileName, isFile: true })) throw new BadRequestError("Special characters not allowed in file name") const parsedCollectionName = `_${collectionName}` - await this.collectionYmlService.updateItemInOrder(reqDetails, { + await this.collectionYmlService.updateItemInOrder(sessionData, { collectionName, oldItem: oldFileName, newItem: newFileName, }) - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName: oldFileName, directoryName: parsedCollectionName, @@ -113,7 +113,7 @@ class CollectionPageService { delete frontMatter.third_nav_title const newContent = convertDataToMarkdown(frontMatter, content) - const { sha: newSha } = await this.gitHubService.create(reqDetails, { + const { sha: newSha } = await this.gitHubService.create(sessionData, { content: newContent, fileName: newFileName, directoryName: parsedCollectionName, diff --git a/src/services/fileServices/MdPageServices/ContactUsPageService.js b/src/services/fileServices/MdPageServices/ContactUsPageService.js index e644a8061..0d9c17c72 100644 --- a/src/services/fileServices/MdPageServices/ContactUsPageService.js +++ b/src/services/fileServices/MdPageServices/ContactUsPageService.js @@ -12,11 +12,11 @@ class ContactUsPageService { this.footerYmlService = footerYmlService } - async read(reqDetails) { + async read(sessionData) { // Due to template intricacies, the feedback url is read from/stored in the footer - // the contact-us link to the feedback page is taken from the feedback url stored in the footer.yml file const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: CONTACT_US_FILE_NAME, directoryName: CONTACT_US_DIRECTORY_NAME, @@ -24,17 +24,17 @@ class ContactUsPageService { ) const { frontMatter, pageContent } = retrieveDataFromMarkdown(rawContent) const { content: footerContent } = await this.footerYmlService.read( - reqDetails + sessionData ) frontMatter.feedback = footerContent.feedback return { content: { frontMatter, pageBody: pageContent }, sha } } - async update(reqDetails, { content, frontMatter, sha }) { + async update(sessionData, { content, frontMatter, sha }) { // Due to template intricacies, the feedback url is read from/stored in the footer - // the contact-us link to the feedback page is taken from the feedback url stored in the footer.yml file const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName: CONTACT_US_FILE_NAME, @@ -43,9 +43,9 @@ class ContactUsPageService { const { content: footerContent, sha: footerSha, - } = await this.footerYmlService.read(reqDetails) + } = await this.footerYmlService.read(sessionData) footerContent.feedback = frontMatter.feedback - await this.footerYmlService.update(reqDetails, { + await this.footerYmlService.update(sessionData, { fileContent: footerContent, sha: footerSha, }) diff --git a/src/services/fileServices/MdPageServices/HomepagePageService.js 
b/src/services/fileServices/MdPageServices/HomepagePageService.js index 4136ea839..fe267717a 100644 --- a/src/services/fileServices/MdPageServices/HomepagePageService.js +++ b/src/services/fileServices/MdPageServices/HomepagePageService.js @@ -10,9 +10,9 @@ class HomepagePageService { this.gitHubService = gitHubService } - async read(reqDetails) { + async read(sessionData) { const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: HOMEPAGE_FILE_NAME, } @@ -21,9 +21,9 @@ class HomepagePageService { return { content: { frontMatter, pageBody: pageContent }, sha } } - async update(reqDetails, { content, frontMatter, sha }) { + async update(sessionData, { content, frontMatter, sha }) { const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName: HOMEPAGE_FILE_NAME, diff --git a/src/services/fileServices/MdPageServices/MediaFileService.js b/src/services/fileServices/MdPageServices/MediaFileService.js index 7e0d0c4f6..369e12e54 100644 --- a/src/services/fileServices/MdPageServices/MediaFileService.js +++ b/src/services/fileServices/MdPageServices/MediaFileService.js @@ -1,11 +1,16 @@ +const { config } = require("@config/config") + +const logger = require("@logger/logger") + const { BadRequestError } = require("@errors/BadRequestError") const { MediaTypeError } = require("@errors/MediaTypeError") -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") const { validateAndSanitizeFileUpload, ALLOWED_FILE_EXTENSIONS, + scanFileForVirus, } = require("@utils/file-upload-utils") const { isMediaPathValid } = require("@validators/validators") @@ -24,13 +29,25 @@ class MediaFileService { throw new BadRequestError("Special characters not allowed in file name") } - async create(reqDetails, { fileName, directoryName, content }) { + async create(sessionData, { fileName, directoryName, content }) { this.mediaNameChecks({ directoryName, fileName }) + + const [, fileContent] = content.split(",") + const fileBuffer = Buffer.from(fileContent, "base64") + + // Scan file for virus - cloudmersive API + const virusScanRes = await scanFileForVirus(fileBuffer) + logger.info(`File scan result: ${virusScanRes.CleanResult}`) + if (!virusScanRes || !virusScanRes.CleanResult) { + throw new BadRequestError("File did not pass virus scan") + } + + // Sanitize and validate file const sanitizedContent = await validateAndSanitizeFileUpload(content) if (!sanitizedContent) { throw new MediaTypeError(`File extension is not within the approved list`) } - const { sha } = await this.gitHubService.create(reqDetails, { + const { sha } = await this.gitHubService.create(sessionData, { content: sanitizedContent, fileName, directoryName, @@ -39,9 +56,9 @@ class MediaFileService { return { name: fileName, content, sha } } - async read(reqDetails, { fileName, directoryName }) { - const { siteName } = reqDetails - const directoryData = await this.gitHubService.readDirectory(reqDetails, { + async read(sessionData, { fileName, directoryName }) { + const { siteName } = sessionData + const directoryData = await this.gitHubService.readDirectory(sessionData, { directoryName, }) const mediaType = directoryName.split("/")[0] @@ -51,7 +68,7 @@ class MediaFileService { ) const { sha } = targetFile const { private: isPrivate } = await this.gitHubService.getRepoInfo( - reqDetails + sessionData ) 
const fileData = { mediaUrl: `https://raw.githubusercontent.com/${GITHUB_ORG_NAME}/${siteName}/staging/${directoryName @@ -68,7 +85,7 @@ class MediaFileService { // Generate blob url const imageExt = fileName.slice(fileName.lastIndexOf(".") + 1) const contentType = `image/${imageExt === "svg" ? "svg+xml" : imageExt}` - const { content } = await this.gitHubService.readMedia(reqDetails, { + const { content } = await this.gitHubService.readMedia(sessionData, { fileSha: sha, }) const blobURL = `data:${contentType};base64,${content}` @@ -77,18 +94,18 @@ class MediaFileService { return fileData } - async update(reqDetails, { fileName, directoryName, content, sha }) { + async update(sessionData, { fileName, directoryName, content, sha }) { this.mediaNameChecks({ directoryName, fileName }) const sanitizedContent = await validateAndSanitizeFileUpload(content) if (!sanitizedContent) { throw new MediaTypeError(`File extension is not within the approved list`) } - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName, directoryName, }) - const { sha: newSha } = await this.gitHubService.create(reqDetails, { + const { sha: newSha } = await this.gitHubService.create(sessionData, { content: sanitizedContent, fileName, directoryName, @@ -102,16 +119,20 @@ class MediaFileService { } } - async delete(reqDetails, { fileName, directoryName, sha }) { + async delete(sessionData, { fileName, directoryName, sha }) { this.mediaNameChecks({ directoryName, fileName }) - return this.gitHubService.delete(reqDetails, { + return this.gitHubService.delete(sessionData, { sha, fileName, directoryName, }) } - async rename(reqDetails, { oldFileName, newFileName, directoryName, sha }) { + async rename( + sessionData, + githubSessionData, + { oldFileName, newFileName, directoryName, sha } + ) { this.mediaNameChecks({ directoryName, fileName: oldFileName }) this.mediaNameChecks({ directoryName, fileName: newFileName }) const oldExt = getFileExt(oldFileName) @@ -129,9 +150,13 @@ class MediaFileService { ) } - const gitTree = await this.gitHubService.getTree(reqDetails, { - isRecursive: true, - }) + const gitTree = await this.gitHubService.getTree( + sessionData, + githubSessionData, + { + isRecursive: true, + } + ) const newGitTree = [] gitTree.forEach((item) => { if (item.path.startsWith(`${directoryName}/`) && item.type !== "tree") { @@ -151,11 +176,15 @@ class MediaFileService { } }) - const newCommitSha = await this.gitHubService.updateTree(reqDetails, { - gitTree: newGitTree, - message: `Renamed ${oldFileName} to ${newFileName}`, - }) - await this.gitHubService.updateRepoState(reqDetails, { + const newCommitSha = await this.gitHubService.updateTree( + sessionData, + githubSessionData, + { + gitTree: newGitTree, + message: `Renamed ${oldFileName} to ${newFileName}`, + } + ) + await this.gitHubService.updateRepoState(sessionData, { commitSha: newCommitSha, }) diff --git a/src/services/fileServices/MdPageServices/ResourcePageService.js b/src/services/fileServices/MdPageServices/ResourcePageService.js index c9cb2773b..80f093e37 100644 --- a/src/services/fileServices/MdPageServices/ResourcePageService.js +++ b/src/services/fileServices/MdPageServices/ResourcePageService.js @@ -37,7 +37,7 @@ class ResourcePageService { } async create( - reqDetails, + sessionData, { fileName, resourceRoomName, resourceCategoryName, content, frontMatter } ) { this.validateAndRetrieveResourceFileMetadata(fileName) @@ -48,7 +48,7 @@ class ResourcePageService { const newContent = 
convertDataToMarkdown(frontMatter, content) - const { sha } = await this.gitHubService.create(reqDetails, { + const { sha } = await this.gitHubService.create(sessionData, { content: newContent, fileName, directoryName: parsedDirectoryName, @@ -56,13 +56,16 @@ class ResourcePageService { return { fileName, content: { frontMatter, pageBody: content }, sha } } - async read(reqDetails, { fileName, resourceRoomName, resourceCategoryName }) { + async read( + sessionData, + { fileName, resourceRoomName, resourceCategoryName } + ) { const parsedDirectoryName = this.getResourceDirectoryPath({ resourceRoomName, resourceCategoryName, }) const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName, directoryName: parsedDirectoryName, @@ -73,7 +76,7 @@ class ResourcePageService { } async update( - reqDetails, + sessionData, { fileName, resourceRoomName, @@ -88,7 +91,7 @@ class ResourcePageService { resourceCategoryName, }) const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName, @@ -103,7 +106,7 @@ class ResourcePageService { } async delete( - reqDetails, + sessionData, { fileName, resourceRoomName, resourceCategoryName, sha } ) { const parsedDirectoryName = this.getResourceDirectoryPath({ @@ -111,7 +114,7 @@ class ResourcePageService { resourceCategoryName, }) - return this.gitHubService.delete(reqDetails, { + return this.gitHubService.delete(sessionData, { sha, fileName, directoryName: parsedDirectoryName, @@ -119,7 +122,7 @@ class ResourcePageService { } async rename( - reqDetails, + sessionData, { oldFileName, newFileName, @@ -136,7 +139,7 @@ class ResourcePageService { resourceCategoryName, }) - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName: oldFileName, directoryName: parsedDirectoryName, @@ -144,7 +147,7 @@ class ResourcePageService { const newContent = convertDataToMarkdown(frontMatter, content) - const { sha: newSha } = await this.gitHubService.create(reqDetails, { + const { sha: newSha } = await this.gitHubService.create(sessionData, { content: newContent, fileName: newFileName, directoryName: parsedDirectoryName, diff --git a/src/services/fileServices/MdPageServices/SubcollectionPageService.js b/src/services/fileServices/MdPageServices/SubcollectionPageService.js index 539dbeff2..c979b1ab5 100644 --- a/src/services/fileServices/MdPageServices/SubcollectionPageService.js +++ b/src/services/fileServices/MdPageServices/SubcollectionPageService.js @@ -15,7 +15,7 @@ class SubcollectionPageService { } async create( - reqDetails, + sessionData, { fileName, collectionName, @@ -32,7 +32,7 @@ class SubcollectionPageService { throw new BadRequestError("Special characters not allowed in file name") const parsedDirectoryName = `_${collectionName}/${subcollectionName}` - await this.collectionYmlService.addItemToOrder(reqDetails, { + await this.collectionYmlService.addItemToOrder(sessionData, { collectionName, item: `${subcollectionName}/${fileName}`, }) @@ -40,7 +40,7 @@ class SubcollectionPageService { frontMatter.third_nav_title = deslugifyCollectionName(subcollectionName) const newContent = convertDataToMarkdown(frontMatter, content) - const { sha } = await this.gitHubService.create(reqDetails, { + const { sha } = await this.gitHubService.create(sessionData, { content: newContent, fileName, directoryName: 
parsedDirectoryName, @@ -48,10 +48,10 @@ class SubcollectionPageService { return { fileName, content: { frontMatter, pageBody: content }, sha } } - async read(reqDetails, { fileName, collectionName, subcollectionName }) { + async read(sessionData, { fileName, collectionName, subcollectionName }) { const parsedDirectoryName = `_${collectionName}/${subcollectionName}` const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName, directoryName: parsedDirectoryName, @@ -62,12 +62,12 @@ class SubcollectionPageService { } async update( - reqDetails, + sessionData, { fileName, collectionName, subcollectionName, content, frontMatter, sha } ) { const parsedDirectoryName = `_${collectionName}/${subcollectionName}` const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName, @@ -82,17 +82,17 @@ class SubcollectionPageService { } async delete( - reqDetails, + sessionData, { fileName, collectionName, subcollectionName, sha } ) { const parsedDirectoryName = `_${collectionName}/${subcollectionName}` // Remove from collection.yml - await this.collectionYmlService.deleteItemFromOrder(reqDetails, { + await this.collectionYmlService.deleteItemFromOrder(sessionData, { collectionName, item: `${subcollectionName}/${fileName}`, }) - return this.gitHubService.delete(reqDetails, { + return this.gitHubService.delete(sessionData, { sha, fileName, directoryName: parsedDirectoryName, @@ -100,7 +100,7 @@ class SubcollectionPageService { } async rename( - reqDetails, + sessionData, { oldFileName, newFileName, @@ -115,13 +115,13 @@ class SubcollectionPageService { throw new BadRequestError("Special characters not allowed in file name") const parsedDirectoryName = `_${collectionName}/${subcollectionName}` - await this.collectionYmlService.updateItemInOrder(reqDetails, { + await this.collectionYmlService.updateItemInOrder(sessionData, { collectionName, oldItem: `${subcollectionName}/${oldFileName}`, newItem: `${subcollectionName}/${newFileName}`, }) - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName: oldFileName, directoryName: parsedDirectoryName, @@ -129,7 +129,7 @@ class SubcollectionPageService { const newContent = convertDataToMarkdown(frontMatter, content) - const { sha: newSha } = await this.gitHubService.create(reqDetails, { + const { sha: newSha } = await this.gitHubService.create(sessionData, { content: newContent, fileName: newFileName, directoryName: parsedDirectoryName, @@ -145,13 +145,13 @@ class SubcollectionPageService { // Used for updating the third_nav_title only without touching the collection.yml async updateSubcollection( - reqDetails, + sessionData, { fileName, collectionName, oldSubcollectionName, newSubcollectionName } ) { const { sha, content: { frontMatter, pageBody }, - } = await this.read(reqDetails, { + } = await this.read(sessionData, { fileName, collectionName, subcollectionName: oldSubcollectionName, @@ -161,12 +161,12 @@ class SubcollectionPageService { const parsedNewDirectoryName = `_${collectionName}/${newSubcollectionName}` frontMatter.third_nav_title = deslugifyCollectionName(newSubcollectionName) const newContent = convertDataToMarkdown(frontMatter, pageBody) - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName, directoryName: 
parsedOldDirectoryName, }) - return this.gitHubService.create(reqDetails, { + return this.gitHubService.create(sessionData, { content: newContent, fileName, directoryName: parsedNewDirectoryName, diff --git a/src/services/fileServices/MdPageServices/UnlinkedPageService.js b/src/services/fileServices/MdPageServices/UnlinkedPageService.js index e4cf15a91..fb7b3f4f9 100644 --- a/src/services/fileServices/MdPageServices/UnlinkedPageService.js +++ b/src/services/fileServices/MdPageServices/UnlinkedPageService.js @@ -15,7 +15,7 @@ class UnlinkedPageService { } async create( - reqDetails, + sessionData, { fileName, content, frontMatter, shouldIgnoreCheck } ) { // Ensure that third_nav_title is removed for files that are being moved from collections @@ -26,7 +26,7 @@ class UnlinkedPageService { throw new BadRequestError("Special characters not allowed in file name") delete frontMatter.third_nav_title const newContent = convertDataToMarkdown(frontMatter, content) - const { sha } = await this.gitHubService.create(reqDetails, { + const { sha } = await this.gitHubService.create(sessionData, { content: newContent, fileName, directoryName: UNLINKED_PAGES_DIRECTORY_NAME, @@ -34,9 +34,9 @@ class UnlinkedPageService { return { fileName, content: { frontMatter, pageBody: content }, sha } } - async read(reqDetails, { fileName }) { + async read(sessionData, { fileName }) { const { content: rawContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName, directoryName: UNLINKED_PAGES_DIRECTORY_NAME, @@ -46,9 +46,9 @@ class UnlinkedPageService { return { fileName, content: { frontMatter, pageBody: pageContent }, sha } } - async update(reqDetails, { fileName, content, frontMatter, sha }) { + async update(sessionData, { fileName, content, frontMatter, sha }) { const newContent = convertDataToMarkdown(frontMatter, content) - const { newSha } = await this.gitHubService.update(reqDetails, { + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: newContent, sha, fileName, @@ -62,8 +62,8 @@ class UnlinkedPageService { } } - async delete(reqDetails, { fileName, sha }) { - return this.gitHubService.delete(reqDetails, { + async delete(sessionData, { fileName, sha }) { + return this.gitHubService.delete(sessionData, { sha, fileName, directoryName: UNLINKED_PAGES_DIRECTORY_NAME, @@ -71,18 +71,18 @@ class UnlinkedPageService { } async rename( - reqDetails, + sessionData, { oldFileName, newFileName, content, frontMatter, sha } ) { if (titleSpecialCharCheck({ title: newFileName, isFile: true })) throw new BadRequestError("Special characters not allowed in file name") const newContent = convertDataToMarkdown(frontMatter, content) - await this.gitHubService.delete(reqDetails, { + await this.gitHubService.delete(sessionData, { sha, fileName: oldFileName, directoryName: UNLINKED_PAGES_DIRECTORY_NAME, }) - const { sha: newSha } = await this.gitHubService.create(reqDetails, { + const { sha: newSha } = await this.gitHubService.create(sessionData, { content: newContent, fileName: newFileName, directoryName: UNLINKED_PAGES_DIRECTORY_NAME, diff --git a/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js b/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js index 649aed3aa..bc1707552 100644 --- a/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js +++ b/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js @@ -1,6 +1,8 @@ +const { config } = require("@config/config") + const { BadRequestError 
} = require("@errors/BadRequestError") -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") describe("Media File Service", () => { const siteName = "test-site" @@ -8,9 +10,10 @@ describe("Media File Service", () => { const imageName = "test image.png" const fileName = "test file.pdf" const directoryName = "images/subfolder" - const mockContent = "test" + const mockContent = "schema, test" const mockSanitizedContent = "sanitized-test" const sha = "12345" + const mockGithubSessionData = "githubData" const reqDetails = { siteName, accessToken } @@ -32,6 +35,7 @@ describe("Media File Service", () => { .fn() .mockReturnValue(mockSanitizedContent), ALLOWED_FILE_EXTENSIONS: ["pdf"], + scanFileForVirus: jest.fn().mockReturnValue({ CleanResult: true }), })) const { @@ -285,7 +289,7 @@ describe("Media File Service", () => { it("rejects renaming to page names with special characters", async () => { await expect( - service.rename(reqDetails, { + service.rename(reqDetails, mockGithubSessionData, { oldFileName, newFileName: "file/file.pdf", directoryName, @@ -295,7 +299,7 @@ describe("Media File Service", () => { }) it("Renaming pages works correctly", async () => { await expect( - service.rename(reqDetails, { + service.rename(reqDetails, mockGithubSessionData, { oldFileName, newFileName: fileName, directoryName, @@ -307,13 +311,21 @@ describe("Media File Service", () => { oldSha: sha, sha, }) - expect(mockGithubService.getTree).toHaveBeenCalledWith(reqDetails, { - isRecursive: true, - }) - expect(mockGithubService.updateTree).toHaveBeenCalledWith(reqDetails, { - gitTree: mockedMovedTree, - message: `Renamed ${oldFileName} to ${fileName}`, - }) + expect(mockGithubService.getTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + isRecursive: true, + } + ) + expect(mockGithubService.updateTree).toHaveBeenCalledWith( + reqDetails, + mockGithubSessionData, + { + gitTree: mockedMovedTree, + message: `Renamed ${oldFileName} to ${fileName}`, + } + ) expect(mockGithubService.updateRepoState).toHaveBeenCalledWith( reqDetails, { diff --git a/src/services/fileServices/YmlFileServices/CollectionYmlService.js b/src/services/fileServices/YmlFileServices/CollectionYmlService.js index c7818bc1b..1dd63433b 100644 --- a/src/services/fileServices/YmlFileServices/CollectionYmlService.js +++ b/src/services/fileServices/YmlFileServices/CollectionYmlService.js @@ -1,5 +1,9 @@ const _ = require("lodash") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const COLLECTION_FILE_NAME = "collection.yml" @@ -8,21 +12,21 @@ class CollectionYmlService { this.gitHubService = gitHubService } - async read(reqDetails, { collectionName }) { + async read(sessionData, { collectionName }) { const { content: unparsedContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: COLLECTION_FILE_NAME, directoryName: `_${collectionName}`, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } - async update(reqDetails, { collectionName, fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) - const { newSha } = await this.gitHubService.update(reqDetails, { + async update(sessionData, { collectionName, fileContent, sha }) { + const stringifiedContent = sanitizedYamlStringify(fileContent) + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, 
fileName: COLLECTION_FILE_NAME, @@ -31,7 +35,7 @@ class CollectionYmlService { return { newSha } } - async create(reqDetails, { collectionName, orderArray }) { + async create(sessionData, { collectionName, orderArray }) { const contentObject = { collections: { [collectionName]: { @@ -40,21 +44,21 @@ class CollectionYmlService { }, }, } - const stringifiedContent = yaml.stringify(contentObject) - return this.gitHubService.create(reqDetails, { + const stringifiedContent = sanitizedYamlStringify(contentObject) + return this.gitHubService.create(sessionData, { content: stringifiedContent, fileName: COLLECTION_FILE_NAME, directoryName: `_${collectionName}`, }) } - async listContents(reqDetails, { collectionName }) { - const { content } = await this.read(reqDetails, { collectionName }) + async listContents(sessionData, { collectionName }) { + const { content } = await this.read(sessionData, { collectionName }) return content.collections[collectionName].order } - async addItemToOrder(reqDetails, { collectionName, item, index }) { - const { content, sha } = await this.read(reqDetails, { collectionName }) + async addItemToOrder(sessionData, { collectionName, item, index }) { + const { content, sha } = await this.read(sessionData, { collectionName }) let newIndex = index if (index === undefined) { @@ -72,20 +76,20 @@ class CollectionYmlService { } content.collections[collectionName].order.splice(newIndex, 0, item) - return this.update(reqDetails, { + return this.update(sessionData, { collectionName, fileContent: content, sha, }) } - async deleteItemFromOrder(reqDetails, { collectionName, item }) { - const { content, sha } = await this.read(reqDetails, { collectionName }) + async deleteItemFromOrder(sessionData, { collectionName, item }) { + const { content, sha } = await this.read(sessionData, { collectionName }) const index = content.collections[collectionName].order.indexOf(item) if (index !== -1) { content.collections[collectionName].order.splice(index, 1) - return this.update(reqDetails, { + return this.update(sessionData, { collectionName, fileContent: content, sha, @@ -93,14 +97,14 @@ class CollectionYmlService { } } - async updateItemInOrder(reqDetails, { collectionName, oldItem, newItem }) { - const { content, sha } = await this.read(reqDetails, { collectionName }) + async updateItemInOrder(sessionData, { collectionName, oldItem, newItem }) { + const { content, sha } = await this.read(sessionData, { collectionName }) const index = content.collections[collectionName].order.indexOf(oldItem) content.collections[collectionName].order.splice(index, 1) content.collections[collectionName].order.splice(index, 0, newItem) - return this.update(reqDetails, { + return this.update(sessionData, { collectionName, fileContent: content, sha, @@ -108,10 +112,10 @@ class CollectionYmlService { } async renameCollectionInOrder( - reqDetails, + sessionData, { oldCollectionName, newCollectionName } ) { - const { content, sha } = await this.read(reqDetails, { + const { content, sha } = await this.read(sessionData, { collectionName: newCollectionName, }) @@ -121,15 +125,15 @@ class CollectionYmlService { }, } - return this.update(reqDetails, { + return this.update(sessionData, { collectionName: newCollectionName, fileContent: contentObject, sha, }) } - async deleteSubfolderFromOrder(reqDetails, { collectionName, subfolder }) { - const { content, sha } = await this.read(reqDetails, { collectionName }) + async deleteSubfolderFromOrder(sessionData, { collectionName, subfolder }) { + const { content, sha } = await 
this.read(sessionData, { collectionName }) const filteredOrder = content.collections[collectionName].order.filter( (item) => !item.includes(`${subfolder}/`) @@ -137,7 +141,7 @@ class CollectionYmlService { const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = filteredOrder - return this.update(reqDetails, { + return this.update(sessionData, { collectionName, fileContent: newContentObject, sha, @@ -145,10 +149,10 @@ class CollectionYmlService { } async renameSubfolderInOrder( - reqDetails, + sessionData, { collectionName, oldSubfolder, newSubfolder } ) { - const { content, sha } = await this.read(reqDetails, { collectionName }) + const { content, sha } = await this.read(sessionData, { collectionName }) const renamedOrder = content.collections[collectionName].order.map( (item) => { if (item.includes(`${oldSubfolder}/`)) @@ -159,15 +163,15 @@ class CollectionYmlService { const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = renamedOrder - return this.update(reqDetails, { + return this.update(sessionData, { collectionName, fileContent: newContentObject, sha, }) } - async updateOrder(reqDetails, { collectionName, newOrder }) { - const { sha } = await this.read(reqDetails, { collectionName }) + async updateOrder(sessionData, { collectionName, newOrder }) { + const { sha } = await this.read(sessionData, { collectionName }) const contentObject = { collections: { [collectionName]: { @@ -176,8 +180,8 @@ class CollectionYmlService { }, }, } - const stringifiedContent = yaml.stringify(contentObject) - return this.gitHubService.update(reqDetails, { + const stringifiedContent = sanitizedYamlStringify(contentObject) + return this.gitHubService.update(sessionData, { directoryName: `_${collectionName}`, fileContent: stringifiedContent, fileName: COLLECTION_FILE_NAME, diff --git a/src/services/fileServices/YmlFileServices/ConfigYmlService.js b/src/services/fileServices/YmlFileServices/ConfigYmlService.js index 76e567982..58e0ba269 100644 --- a/src/services/fileServices/YmlFileServices/ConfigYmlService.js +++ b/src/services/fileServices/YmlFileServices/ConfigYmlService.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const CONFIG_FILE_NAME = "_config.yml" @@ -7,19 +10,19 @@ class ConfigYmlService { this.gitHubService = gitHubService } - async read(reqDetails) { + async read(sessionData) { const { content: unparsedContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: CONFIG_FILE_NAME, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } async update(reqDetails, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) + const stringifiedContent = sanitizedYamlStringify(fileContent) const { newSha } = await this.gitHubService.update(reqDetails, { fileContent: stringifiedContent, sha, diff --git a/src/services/fileServices/YmlFileServices/FooterYmlService.js b/src/services/fileServices/YmlFileServices/FooterYmlService.js index 18aa5d29c..ce0f4616d 100644 --- a/src/services/fileServices/YmlFileServices/FooterYmlService.js +++ b/src/services/fileServices/YmlFileServices/FooterYmlService.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const FOOTER_FILE_NAME = "footer.yml" const FOOTER_FILE_DIR = "_data" @@ -8,21 
+11,21 @@ class FooterYmlService { this.gitHubService = gitHubService } - async read(reqDetails) { + async read(sessionData) { const { content: unparsedContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: FOOTER_FILE_NAME, directoryName: FOOTER_FILE_DIR, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } - async update(reqDetails, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) - const { newSha } = await this.gitHubService.update(reqDetails, { + async update(sessionData, { fileContent, sha }) { + const stringifiedContent = sanitizedYamlStringify(fileContent) + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, fileName: FOOTER_FILE_NAME, diff --git a/src/services/fileServices/YmlFileServices/NavYmlService.js b/src/services/fileServices/YmlFileServices/NavYmlService.js index 9f0a7208c..5ea2385f3 100644 --- a/src/services/fileServices/YmlFileServices/NavYmlService.js +++ b/src/services/fileServices/YmlFileServices/NavYmlService.js @@ -1,6 +1,8 @@ -const yaml = require("yaml") - const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const NAV_FILE_NAME = "navigation.yml" const NAV_FILE_DIR = "_data" @@ -10,21 +12,21 @@ class NavYmlService { this.gitHubService = gitHubService } - async read(reqDetails) { + async read(sessionData) { const { content: unparsedContent, sha } = await this.gitHubService.read( - reqDetails, + sessionData, { fileName: NAV_FILE_NAME, directoryName: NAV_FILE_DIR, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } - async update(reqDetails, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) - const { newSha } = await this.gitHubService.update(reqDetails, { + async update(sessionData, { fileContent, sha }) { + const stringifiedContent = sanitizedYamlStringify(fileContent) + const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, fileName: NAV_FILE_NAME, @@ -33,20 +35,20 @@ class NavYmlService { return { newSha } } - async createCollectionInNav(reqDetails, { collectionName }) { - const { content, sha } = await this.read(reqDetails) + async createCollectionInNav(sessionData, { collectionName }) { + const { content, sha } = await this.read(sessionData) content.links.push({ title: deslugifyCollectionName(collectionName), collection: collectionName, }) - return this.update(reqDetails, { fileContent: content, sha }) + return this.update(sessionData, { fileContent: content, sha }) } async renameCollectionInNav( - reqDetails, + sessionData, { oldCollectionName, newCollectionName } ) { - const { content, sha } = await this.read(reqDetails) + const { content, sha } = await this.read(sessionData) const newNavLinks = content.links.map((link) => { if (link.collection === oldCollectionName) { return { @@ -60,11 +62,11 @@ class NavYmlService { ...content, links: newNavLinks, } - return this.update(reqDetails, { fileContent: newNavContentObject, sha }) + return this.update(sessionData, { fileContent: newNavContentObject, sha }) } - async deleteCollectionInNav(reqDetails, { collectionName }) { - const { content, sha } = await this.read(reqDetails) + async deleteCollectionInNav(sessionData, { collectionName }) { + const { content, sha } = await 
this.read(sessionData) const newNavLinks = content.links.filter( (link) => link.collection !== collectionName ) @@ -72,7 +74,7 @@ class NavYmlService { ...content, links: newNavLinks, } - return this.update(reqDetails, { fileContent: newNavContentObject, sha }) + return this.update(sessionData, { fileContent: newNavContentObject, sha }) } } diff --git a/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js b/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js index aac57b626..4205faf20 100644 --- a/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js +++ b/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js @@ -3,7 +3,8 @@ const { } = require("@services/fileServices/YmlFileServices/CollectionYmlService") const COLLECTION_FILE_NAME = "collection.yml" -const yaml = require("yaml") +const { sanitizedYamlStringify } = require("@utils/yaml-utils") + const _ = require("lodash") describe("Collection Yml Service", () => { @@ -33,7 +34,7 @@ describe("Collection Yml Service", () => { }, }, } - const mockRawContent = yaml.stringify(mockParsedContent) + const mockRawContent = sanitizedYamlStringify(mockParsedContent) const mockGithubService = { create: jest.fn(), @@ -92,7 +93,7 @@ describe("Collection Yml Service", () => { mockGithubService.create.mockResolvedValueOnce({ sha }) }) it("Creating a collection.yml file with no specified files works correctly", async () => { - const content = yaml.stringify({ + const content = sanitizedYamlStringify({ collections: { [collectionName]: { output: true, @@ -114,7 +115,7 @@ describe("Collection Yml Service", () => { }) }) it("Creating a collection.yml file with specified files works correctly", async () => { - const content = yaml.stringify({ + const content = sanitizedYamlStringify({ collections: { [collectionName]: { output: true, @@ -167,7 +168,7 @@ describe("Collection Yml Service", () => { const expectedArray = [newFileName, ...orderArray] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -189,7 +190,7 @@ describe("Collection Yml Service", () => { const expectedArray = [newFileName, ...orderArray] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -212,7 +213,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(1, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -236,7 +237,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(addedIndex, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = 
yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -261,7 +262,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(addedIndex, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -296,7 +297,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteItemFromOrder(reqDetails, { collectionName, @@ -318,7 +319,7 @@ describe("Collection Yml Service", () => { const expectedArray = orderArray.filter((item) => item !== itemName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteItemFromOrder(reqDetails, { collectionName, @@ -366,7 +367,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(index, 0, renamedItem) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.updateItemInOrder(reqDetails, { collectionName, @@ -400,7 +401,7 @@ describe("Collection Yml Service", () => { [renamedCollection]: mockParsedContent.collections[collectionName], }, } - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.renameCollectionInOrder(reqDetails, { oldCollectionName: collectionName, @@ -432,7 +433,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteSubfolderFromOrder(reqDetails, { collectionName, @@ -465,7 +466,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.renameSubfolderInOrder(reqDetails, { collectionName, @@ -502,7 +503,7 @@ describe("Collection Yml Service", () => { ] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = newOrder - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( 
service.updateOrder(reqDetails, { collectionName, diff --git a/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js b/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js index 5a9be608b..27e212f14 100644 --- a/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js +++ b/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js @@ -12,7 +12,8 @@ const { const NAV_FILE_NAME = "navigation.yml" const NAV_FILE_DIR = "_data" -const yaml = require("yaml") +const { sanitizedYamlStringify } = require("@utils/yaml-utils") + const _ = require("lodash") describe("Nav Yml Service", () => { @@ -142,7 +143,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) @@ -182,7 +183,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) @@ -214,7 +215,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) diff --git a/src/services/identity/AuthService.ts b/src/services/identity/AuthService.ts index 8651995e4..508a1b41b 100644 --- a/src/services/identity/AuthService.ts +++ b/src/services/identity/AuthService.ts @@ -1,5 +1,6 @@ import { NotFoundError } from "@errors/NotFoundError" +import UserSessionData from "@root/classes/UserSessionData" import { GitHubService } from "@services/db/GitHubService" interface AuthServiceProps { @@ -14,16 +15,9 @@ class AuthService { this.gitHubService = gitHubService } - async hasAccessToSite( - siteName: string, - userId: string, - accessToken: string - ): Promise { + async hasAccessToSite(sessionData: UserSessionData): Promise { try { - await this.gitHubService.checkHasAccess( - { accessToken, siteName }, - { userId } - ) + await this.gitHubService.checkHasAccess(sessionData) return true } catch (err) { if (err instanceof NotFoundError) { diff --git a/src/services/identity/CollaboratorsService.ts b/src/services/identity/CollaboratorsService.ts new file mode 100644 index 000000000..c9b8b1efb --- /dev/null +++ b/src/services/identity/CollaboratorsService.ts @@ -0,0 +1,292 @@ +import _ from "lodash" +import { ModelStatic, Op } from "sequelize" +import validator from "validator" + +import { ForbiddenError } from "@errors/ForbiddenError" +import { NotFoundError } from "@errors/NotFoundError" +import { UnprocessableError } from "@errors/UnprocessableError" + +import { + CollaboratorRoles, + INACTIVE_USER_THRESHOLD_DAYS, +} from "@constants/constants" + +import { Whitelist, User, Site, SiteMember, Repo } from "@database/models" +import { BadRequestError } from "@root/errors/BadRequestError" +import { ConflictError } from "@root/errors/ConflictError" +import logger from "@root/logger/logger" + +import SitesService from "./SitesService" +import UsersService from "./UsersService" + +interface CollaboratorsServiceProps { + siteRepository: ModelStatic + siteMemberRepository: ModelStatic + sitesService: SitesService + usersService: 
UsersService + whitelist: ModelStatic +} + +class CollaboratorsService { + // NOTE: Explicitly specifying using keyed properties to ensure + // that the types are synced. + private readonly siteRepository: CollaboratorsServiceProps["siteRepository"] + + private readonly siteMemberRepository: CollaboratorsServiceProps["siteMemberRepository"] + + private readonly sitesService: CollaboratorsServiceProps["sitesService"] + + private readonly usersService: CollaboratorsServiceProps["usersService"] + + private readonly whitelist: CollaboratorsServiceProps["whitelist"] + + constructor({ + siteRepository, + siteMemberRepository, + sitesService, + usersService, + whitelist, + }: CollaboratorsServiceProps) { + this.siteRepository = siteRepository + this.siteMemberRepository = siteMemberRepository + this.sitesService = sitesService + this.usersService = usersService + this.whitelist = whitelist + } + + deriveAllowedRoleFromEmail = async (fullEmail: string) => { + const whitelistEntries = await this.whitelist.findAll({ + where: { + expiry: { + [Op.or]: [{ [Op.is]: null }, { [Op.gt]: new Date() }], + }, + }, + }) + + const matchedDomains = whitelistEntries.filter((entry) => + fullEmail.endsWith(entry.email) + ) + + if (!matchedDomains.length) return null + + // TODO: Modify this method because the presence of the expiry field is not + // the best way of differentiating Admin/Contributor roles + return matchedDomains[0].expiry + ? CollaboratorRoles.Contributor + : CollaboratorRoles.Admin + } + + list = async (siteName: string, requesterId?: string) => { + // Note: + // =============================================== + // We need to query from the Sites table instead of the SiteMembers table + // because Sequelize only recognizes that there is a relationship between Sites <-> Users. + // This means that we cannot retrieve joins if we start the query in the SiteMembers table. + // + // However, the converse is possible, i.e. we can query the Sites table and retrieve joined + // records from the Users table, along with the SiteMember records. + const site = await this.siteRepository.findOne({ + include: [ + { + model: User, + as: "site_members", + attributes: { + // Hide PII such as contactNumber + exclude: ["contactNumber"], + }, + }, + { + model: Repo, + where: { + name: siteName, + }, + }, + ], + }) + const collaborators = site?.site_members ?? [] + + // We perform the following sort via application code because: + // - sorting it via the ORM code alone is quite complicated + // - putting the sorting logic into a stored SQL function involves DB migration work + // - we can achieve this easily with lodash, and there is unlikely to be a performance hit + // given the small number of collaborators in each site + return _.orderBy( + collaborators, + [ + // Prioritize Admins over Contributors + (collaborator) => + collaborator.SiteMember.role === CollaboratorRoles.Admin, + // Prioritize elements where the userId matches the requesterId (i.e. "you") + (collaborator) => collaborator.id.toString() === requesterId, + // Prioritize the user that has not logged in for the longest time + (collaborator) => collaborator.lastLoggedIn, + ], + ["desc", "desc", "asc"] + ) + } + + create = async (siteName: string, email: string, acknowledged: boolean) => { + if (!email || !validator.isEmail(email)) { + return new BadRequestError( + "That doesn't look like a valid email. Try a gov.sg or other whitelisted email." + ) + } + + // 1. 
Check if email address is whitelisted, and derive the collaborator role + const derivedRole = await this.deriveAllowedRoleFromEmail(email) + if (!derivedRole) { + // Error - the user email is not whitelisted + logger.error( + `create collaborators error: user email ${email} is not whitelisted` + ) + return new ForbiddenError( + `This collaborator couldn't be added. Visit our guide for more assistance.` + ) + } + + // 2. Check if site exists + const site = await this.sitesService.getBySiteName(siteName) + if (!site) { + // Error - site does not exist + logger.error(`create collaborators error: site ${siteName} is not valid`) + return new NotFoundError(`Site does not exist`) + } + + // 3. Check if valid user exists + const user = await this.usersService.findByEmail(email) + if (!user) { + // Error - user with a valid gov email does not exist + logger.error(`create collaborators error: user ${email} is not valid`) + return new NotFoundError( + `This user does not have an Isomer account. Ask them to log in to Isomer and try adding them again.` + ) + } + + // 4. Check if user is already a site member + const existingSiteMember = await this.siteMemberRepository.findOne({ + where: { + siteId: site.id, + userId: user.id, + }, + }) + if (existingSiteMember) { + return new ConflictError(`User is already a member of the site`) + } + + // 5. Ensure that acknowledgement is true if the email role is contributor + if (derivedRole === CollaboratorRoles.Contributor && !acknowledged) { + return new UnprocessableError("Acknowledgement required") + } + + // 6. Create the SiteMembers record + return this.siteMemberRepository.create({ + siteId: site.id, + userId: user.id, + role: derivedRole, + }) + } + + delete = async (siteName: string, userId: string) => { + const site = await this.siteRepository.findOne({ + include: [ + { + model: User, + as: "site_members", + }, + { + model: Repo, + where: { + name: siteName, + }, + }, + ], + }) + + const siteMembers = site?.site_members ?? [] + const isUserSiteMember = + _.filter(siteMembers, (member) => member.id.toString() === userId) + .length > 0 + if (!isUserSiteMember) { + return new NotFoundError(`User is not a site member`) + } + + const siteAdmins = siteMembers.filter( + (member) => member.SiteMember.role === CollaboratorRoles.Admin + ) + if ( + siteAdmins.length === 1 && + siteAdmins[0].id.toString() === userId // Required to check if the collaborator being deleted is an admin + ) { + return new UnprocessableError(`Cannot delete final site admin`) + } + + return this.siteMemberRepository.destroy({ + where: { siteId: site?.id, userId }, + }) + } + + getRole = async ( + siteName: string, + userId: string + ): Promise => { + const site = await this.siteRepository.findOne({ + include: [ + { + model: User, + as: "site_members", + where: { + id: userId, + }, + }, + { + model: Repo, + where: { + name: siteName, + }, + }, + ], + }) + + return site?.site_members?.[0]?.SiteMember?.role ?? null + } + + getStatistics = async (siteName: string) => { + const inactiveLimit = new Date() + inactiveLimit.setDate( + inactiveLimit.getDate() - INACTIVE_USER_THRESHOLD_DAYS + ) + const site = await this.siteRepository.findOne({ + include: [ + { + model: User, + as: "site_members", + }, + { + model: Repo, + where: { + name: siteName, + }, + }, + ], + }) + + const collaborators = site?.site_members ?? 
[] + const totalCount = collaborators.length + + if (totalCount === 0) { + // Every site must have at least one collaborator + return new NotFoundError(`Site does not exist`) + } + + const inactiveCount = collaborators.filter( + (collaborator) => collaborator.lastLoggedIn < inactiveLimit + ).length + + return { + total: totalCount, + inactive: inactiveCount, + } + } +} + +export default CollaboratorsService diff --git a/src/services/identity/DeploymentClient.ts b/src/services/identity/DeploymentClient.ts index 9ff2b8c30..3009dc5f8 100644 --- a/src/services/identity/DeploymentClient.ts +++ b/src/services/identity/DeploymentClient.ts @@ -10,9 +10,12 @@ import { } from "@aws-sdk/client-amplify" import { ResultAsync } from "neverthrow" +import { config } from "@config/config" + import { AmplifyError } from "@root/types/index" -const { AWS_REGION, SYSTEM_GITHUB_TOKEN } = process.env +const AWS_REGION = config.get("aws.amplify.region") +const SYSTEM_GITHUB_TOKEN = config.get("github.systemToken") const AMPLIFY_BUILD_SPEC = ` version: 1 diff --git a/src/services/identity/IsomerAdminsService.ts b/src/services/identity/IsomerAdminsService.ts new file mode 100644 index 000000000..4ac712814 --- /dev/null +++ b/src/services/identity/IsomerAdminsService.ts @@ -0,0 +1,27 @@ +import { ModelStatic } from "sequelize" + +import { IsomerAdmin } from "@database/models" + +interface IsomerAdminsServiceProps { + repository: ModelStatic +} + +class IsomerAdminsService { + // NOTE: Explicitly specifying using keyed properties to ensure + // that the types are synced. + private readonly repository: IsomerAdminsServiceProps["repository"] + + constructor({ repository }: IsomerAdminsServiceProps) { + this.repository = repository + } + + async getByUserId(userId: string): Promise { + const site = await this.repository.findOne({ + where: { userId }, + }) + + return site + } +} + +export default IsomerAdminsService diff --git a/src/services/identity/NotificationsService.ts b/src/services/identity/NotificationsService.ts new file mode 100644 index 000000000..fcbde7221 --- /dev/null +++ b/src/services/identity/NotificationsService.ts @@ -0,0 +1,241 @@ +import { FindOptions, ModelStatic, Op, Sequelize } from "sequelize" + +import { Notification, Site, Repo, SiteMember } from "@database/models" +import { + NotificationType, + getNotificationExpiryDate, + getNotificationMessage, + getNotificationPriority, +} from "@root/utils/notification-utils" + +const NUM_RECENT_NOTIFICATIONS = 6 + +interface NotificationsServiceProps { + repository: ModelStatic + siteMember: ModelStatic +} + +export interface NotificationResponse { + message: string + createdAt: Date + link: string + isRead: boolean + sourceUsername: string + type: string +} + +class NotificationsService { + // NOTE: Explicitly specifying using keyed properties to ensure + // that the types are synced. 
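// ---------------------------------------------------------------------------
// [Editor's aside, not part of the patch] Several hunks in this patch
// (DeploymentClient above, and ReposService, SmsClient and UsersService
// further down) replace direct process.env reads with a centralised
// config.get(...) accessor imported from "@config/config". That module is not
// shown in this diff; the sketch below is one plausible shape for it, assuming
// a convict-style schema. The keys mirror the calls seen in this patch, e.g.
// config.get("aws.amplify.region") and config.get("github.systemToken");
// other keys used later (auth.otpExpiry, postman.apiKey, ...) would sit
// alongside them.
// ---------------------------------------------------------------------------
import convict from "convict"

export const config = convict({
  aws: {
    amplify: {
      region: {
        doc: "AWS region used for Amplify deployments",
        env: "AWS_REGION",
        format: String,
        default: "ap-southeast-1",
      },
    },
  },
  github: {
    systemToken: {
      doc: "GitHub token used for system-level API calls",
      env: "SYSTEM_GITHUB_TOKEN",
      format: String,
      default: "",
      sensitive: true,
    },
  },
  postman: {
    apiKey: {
      doc: "Postman.gov.sg API key",
      env: "POSTMAN_API_KEY",
      format: String,
      default: "",
      sensitive: true,
    },
    smsCredName: {
      doc: "Postman SMS credential name",
      env: "POSTMAN_SMS_CRED_NAME",
      format: String,
      default: "",
    },
  },
})

// Fail fast on unknown or missing configuration at startup.
config.validate({ allowed: "strict" })
// ---------------------------------------------------------------------------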
+ private readonly repository: NotificationsServiceProps["repository"] + + private readonly siteMember: NotificationsServiceProps["siteMember"] + + constructor({ repository, siteMember }: NotificationsServiceProps) { + this.repository = repository + this.siteMember = siteMember + } + + formatNotifications(notifications: Notification[]) { + return notifications.map((notification) => ({ + message: notification.message, + createdAt: notification.createdAt, + link: notification.link, + isRead: !!notification.firstReadTime, + sourceUsername: notification.sourceUsername, + type: notification.type, + })) + } + + async findAll({ + siteName, + userId, + findOptions, + }: { + siteName: string + userId: string + findOptions?: FindOptions + }) { + // We want separate sorting for unread notifications and read notifications - for unread, high priority notifications should go first + // while for read, newer notifications should be displayed first, regardless of priority + // The second sort criteria only affects unread notifications and is used to allow high priority notifications to be sorted first (priority > created_at) + // Read notifications are unaffected by the second sort criteria and will continue to be sorted in the remaining order (first_read_time > created_at > priority) + return this.repository.findAll({ + where: { + user_id: userId, + }, + order: [ + ["first_read_time", "DESC NULLS FIRST"], + [ + Sequelize.literal( + "CASE WHEN first_read_time IS NULL THEN priority ELSE 999 END" + ), + "ASC", + ], + ["created_at", "DESC"], + ["priority", "ASC"], // Low numbers indicate a higher priority + ], + include: [ + { + model: Site, + as: "site", + required: true, + include: [ + { + model: Repo, + required: true, + where: { + name: siteName, + }, + }, + ], + }, + ], + ...findOptions, + }) + } + + async listRecent({ siteName, userId }: { siteName: string; userId: string }) { + const newNotifications = await this.findAll({ + siteName, + userId, + findOptions: { + where: { + userId, + firstReadTime: { + [Op.eq]: null, + }, + }, + }, + }) + + if (newNotifications.length > 0) + return this.formatNotifications(newNotifications) + + const mostRecentNotifications = await this.findAll({ + siteName, + userId, + findOptions: { + limit: NUM_RECENT_NOTIFICATIONS, + }, + }) + + return this.formatNotifications(mostRecentNotifications) + } + + async listAll({ siteName, userId }: { siteName: string; userId: string }) { + const notifications = await this.findAll({ + siteName, + userId, + }) + return this.formatNotifications(notifications) + } + + async markNotificationsAsRead({ + siteName, + userId, + }: { + siteName: string + userId: string + }) { + const siteMember = await this.siteMember.findOne({ + where: { user_id: userId }, + include: [ + { + model: Site, + required: true, + include: [ + { + model: Repo, + required: true, + where: { + name: siteName, + }, + }, + ], + }, + ], + }) + const readAtDate = new Date() + await this.repository.update( + { + firstReadTime: readAtDate, + }, + { + where: { + site_member_id: siteMember?.id, + first_read_time: null, + }, + } + ) + } + + async create({ + siteMember, + link, + notificationType, + notificationSourceUsername, + }: { + siteMember: SiteMember + link: string + notificationType: NotificationType + notificationSourceUsername: string + }) { + const recentTargetNotification = await this.repository.findOne({ + where: { + user_id: siteMember.userId, + type: notificationType, + created_at: { + [Op.gte]: getNotificationExpiryDate(notificationType), + }, + link, + 
source_username: notificationSourceUsername, + }, + include: [ + { + model: Site, + as: "site", + required: true, + where: { + id: siteMember.siteId, + }, + }, + ], + }) + + if (recentTargetNotification) { + // Update existing notification + // createdAt is a special column which must be flagged as changed + recentTargetNotification.changed("createdAt", true) + await recentTargetNotification.update( + { + firstReadTime: null, + createdAt: new Date(), + message: getNotificationMessage( + notificationType, + notificationSourceUsername + ), + }, + { raw: true } + ) + } else { + // Create new notification + await this.repository.create({ + siteMemberId: siteMember?.id, + siteId: siteMember?.siteId, + userId: siteMember.userId, + message: getNotificationMessage( + notificationType, + notificationSourceUsername + ), // helper method here + link, + sourceUsername: notificationSourceUsername, + type: notificationType, + firstReadTime: null, + priority: getNotificationPriority(notificationType), // get priority + }) + } + } +} + +export default NotificationsService diff --git a/src/services/identity/OtpService.ts b/src/services/identity/OtpService.ts new file mode 100644 index 000000000..3583f997e --- /dev/null +++ b/src/services/identity/OtpService.ts @@ -0,0 +1,28 @@ +import crypto from "crypto" + +import bcrypt from "bcrypt" + +const SALT_TIMES = 10 +const TOTP_LENGTH = 6 + +class OtpService { + private generateOtp = (): string => + // Generates cryptographically strong pseudo-random data. + Array(TOTP_LENGTH) + .fill(0) + .map(() => crypto.randomInt(0, 10)) + .join("") + + generateLoginOtpWithHash = async () => { + const otp = this.generateOtp() + const hashedOtp = await bcrypt.hash(otp, SALT_TIMES) + return { otp, hashedOtp } + } + + verifyOtp = async (otp: string, hashedOtp: string): Promise => { + if (!otp || !hashedOtp) return false + return bcrypt.compare(otp, hashedOtp) + } +} + +export default OtpService diff --git a/src/services/identity/ReposService.ts b/src/services/identity/ReposService.ts index e89d1b528..cc2c0a775 100644 --- a/src/services/identity/ReposService.ts +++ b/src/services/identity/ReposService.ts @@ -7,11 +7,13 @@ import git from "isomorphic-git" import http from "isomorphic-git/http/node" import { ModelStatic } from "sequelize" +import { config } from "@config/config" + import { UnprocessableError } from "@errors/UnprocessableError" import { Repo, Site } from "@database/models" -const { SYSTEM_GITHUB_TOKEN } = process.env +const SYSTEM_GITHUB_TOKEN = config.get("github.systemToken") const octokit = new Octokit({ auth: SYSTEM_GITHUB_TOKEN }) // Constants diff --git a/src/services/identity/SitesService.ts b/src/services/identity/SitesService.ts index 7ed5b124c..88a040060 100644 --- a/src/services/identity/SitesService.ts +++ b/src/services/identity/SitesService.ts @@ -1,34 +1,401 @@ +import _ from "lodash" import { ModelStatic } from "sequelize" -import { Site } from "@database/models" - -import TokenStore from "./TokenStore" +import { Deployment, Repo, Site } from "@database/models" +import type UserSessionData from "@root/classes/UserSessionData" +import type UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" +import { + ISOMER_GITHUB_ORG_NAME, + ISOMERPAGES_REPO_PAGE_COUNT, + GH_MAX_REPO_COUNT, + ISOMER_ADMIN_REPOS, +} from "@root/constants" +import { NotFoundError } from "@root/errors/NotFoundError" +import RequestNotFoundError from "@root/errors/RequestNotFoundError" +import { UnprocessableError } from "@root/errors/UnprocessableError" +import { 
genericGitHubAxiosInstance } from "@root/services/api/AxiosInstance" +import { GitHubCommitData } from "@root/types/commitData" +import { ConfigYmlData } from "@root/types/configYml" +import type { + GitHubRepositoryData, + RepositoryData, + SiteUrls, +} from "@root/types/repoInfo" +import { SiteInfo } from "@root/types/siteInfo" +import { GitHubService } from "@services/db/GitHubService" +import { ConfigYmlService } from "@services/fileServices/YmlFileServices/ConfigYmlService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import UsersService from "@services/identity/UsersService" +import ReviewRequestService from "@services/review/ReviewRequestService" interface SitesServiceProps { - repository: ModelStatic - tokenStore: TokenStore + siteRepository: ModelStatic + gitHubService: GitHubService + configYmlService: ConfigYmlService + usersService: UsersService + isomerAdminsService: IsomerAdminsService + reviewRequestService: ReviewRequestService } class SitesService { // NOTE: Explicitly specifying using keyed properties to ensure // that the types are synced. - private readonly repository: SitesServiceProps["repository"] + private readonly siteRepository: SitesServiceProps["siteRepository"] + + private readonly gitHubService: SitesServiceProps["gitHubService"] + + private readonly configYmlService: SitesServiceProps["configYmlService"] + + private readonly usersService: SitesServiceProps["usersService"] + + private readonly isomerAdminsService: SitesServiceProps["isomerAdminsService"] + + private readonly reviewRequestService: SitesServiceProps["reviewRequestService"] + + constructor({ + siteRepository, + gitHubService, + configYmlService, + usersService, + isomerAdminsService, + reviewRequestService, + }: SitesServiceProps) { + this.siteRepository = siteRepository + this.gitHubService = gitHubService + this.configYmlService = configYmlService + this.usersService = usersService + this.isomerAdminsService = isomerAdminsService + this.reviewRequestService = reviewRequestService + } + + isGitHubCommitData(commit: unknown): commit is GitHubCommitData { + return ( + !!commit && + (commit as GitHubCommitData).author !== undefined && + (commit as GitHubCommitData).author.name !== undefined && + (commit as GitHubCommitData).author.date !== undefined && + (commit as GitHubCommitData).author.email !== undefined && + (commit as GitHubCommitData).message !== undefined + ) + } + + extractAuthorEmail(commit: GitHubCommitData): string { + const { + author: { email: authorEmail }, + } = commit + return authorEmail + } + + async insertUrlsFromConfigYml( + siteUrls: SiteUrls, + sessionData: UserWithSiteSessionData + ): Promise { + if (siteUrls.staging && siteUrls.prod) { + // We call ConfigYmlService only when necessary + return siteUrls + } + + const { + content: configYmlData, + }: { content: ConfigYmlData } = await this.configYmlService.read( + sessionData + ) + + // Only replace the urls if they are not already present + const newSiteUrls: SiteUrls = { + staging: + configYmlData.staging && !siteUrls.staging + ? configYmlData.staging + : siteUrls.staging, + prod: + configYmlData.prod && !siteUrls.prod + ? 
configYmlData.prod + : siteUrls.prod, + } + + return newSiteUrls + } + + async insertUrlsFromGitHubDescription( + siteUrls: SiteUrls, + sessionData: UserWithSiteSessionData + ): Promise { + if (siteUrls.staging && siteUrls.prod) { + // We call GitHubService only when necessary + return siteUrls + } + + const { + description, + }: { description: string } = await this.gitHubService.getRepoInfo( + sessionData + ) + + // Retrieve the url from the description + // repo descriptions have varying formats, so we look for the first link + const repoDescTokens = description.replace("/;/g", " ").split(" ") - private readonly tokenStore: SitesServiceProps["tokenStore"] + const stagingUrlFromDesc = repoDescTokens.find( + (token) => token.includes("http") && token.includes("staging") + ) + const prodUrlFromDesc = repoDescTokens.find( + (token) => token.includes("http") && token.includes("prod") + ) - constructor({ repository, tokenStore }: SitesServiceProps) { - this.repository = repository - this.tokenStore = tokenStore + // Only replace the urls if they are not already present + const newSiteUrls: SiteUrls = { + staging: + stagingUrlFromDesc && !siteUrls.staging + ? stagingUrlFromDesc + : siteUrls.staging, + prod: prodUrlFromDesc && !siteUrls.prod ? prodUrlFromDesc : siteUrls.prod, + } + + return newSiteUrls } async getBySiteName(siteName: string): Promise { - const site = await this.repository.findOne({ - where: { name: siteName }, + const site = await this.siteRepository.findOne({ + include: [ + { + model: Repo, + where: { + name: siteName, + }, + }, + ], }) return site } + async getSitesForEmailUser(userId: string) { + const user = await this.usersService.findSitesByUserId(userId) + + if (!user) { + return [] + } + + return user.site_members.map((site) => site.repo?.name) + } + + async getCommitAuthorEmail(commit: GitHubCommitData) { + const { message } = commit + + // Commit message created as part of phase 2 identity + if (message.startsWith("{") && message.endsWith("}")) { + try { + const { userId }: { userId: string } = JSON.parse(message) + const user = await this.usersService.findById(userId) + + if (user && user.email) { + return user.email + } + } catch (e) { + // Do nothing + } + } + + // Legacy style of commits, or if the user is not found + return this.extractAuthorEmail(commit) + } + + async getMergeAuthorEmail( + commit: GitHubCommitData, + sessionData: UserWithSiteSessionData + ) { + const { + author: { name: authorName }, + } = commit + const { siteName } = sessionData + + if (!authorName.startsWith("isomergithub")) { + // Legacy style of commits, or if the user is not found + return this.extractAuthorEmail(commit) + } + + // Commit was made by our common identity GitHub user + const site = await this.getBySiteName(siteName) + if (!site) { + return this.extractAuthorEmail(commit) + } + + // Retrieve the latest merged review request for the site + const possibleReviewRequest = await this.reviewRequestService.getLatestMergedReviewRequest( + site + ) + if (possibleReviewRequest instanceof RequestNotFoundError) { + // No review request found, fallback to the commit author email + return this.extractAuthorEmail(commit) + } + + // Return the email address of the requestor who made the review request + const { + requestor: { email: requestorEmail }, + } = possibleReviewRequest + + if (requestorEmail) { + return requestorEmail + } + + // No email address found, fallback to the commit author email + return this.extractAuthorEmail(commit) + } + + async getUrlsOfSite( + sessionData: 
UserWithSiteSessionData + ): Promise { + // Tries to get the site urls in the following order: + // 1. From the deployments database table + // 2. From the config.yml file + // 3. From the GitHub repository description + // Otherwise, returns a NotFoundError + const { siteName } = sessionData + + const site = await this.siteRepository.findOne({ + include: [ + { + model: Deployment, + as: "deployment", + }, + { + model: Repo, + where: { + name: siteName, + }, + }, + ], + }) + + // Note: site may be null if the site does not exist + const siteUrls: SiteUrls = { + staging: site?.deployment?.stagingUrl ?? "", + prod: site?.deployment?.productionUrl ?? "", + } + + _.assign( + siteUrls, + await this.insertUrlsFromConfigYml(siteUrls, sessionData) + ) + _.assign( + siteUrls, + await this.insertUrlsFromGitHubDescription(siteUrls, sessionData) + ) + + if (!siteUrls.staging && !siteUrls.prod) { + return new NotFoundError( + `The site ${siteName} does not have a staging or production url` + ) + } + + return siteUrls + } + + async getSites(sessionData: UserSessionData): Promise { + const isEmailUser = sessionData.isEmailUser() + const { isomerUserId: userId } = sessionData + const isAdminUser = !!(await this.isomerAdminsService.getByUserId(userId)) + const { accessToken } = sessionData + const endpoint = `https://api.github.com/orgs/${ISOMER_GITHUB_ORG_NAME}/repos` + + // Simultaneously retrieve all isomerpages repos + const paramsArr = _.fill(Array(ISOMERPAGES_REPO_PAGE_COUNT), null).map( + (_, idx) => ({ + per_page: GH_MAX_REPO_COUNT, + sort: "full_name", + page: idx + 1, + }) + ) + + const allSites = await Promise.all( + paramsArr.map(async (params) => { + const { + data: respData, + }: { + data: GitHubRepositoryData[] + } = await genericGitHubAxiosInstance.get(endpoint, { + headers: { Authorization: `token ${accessToken}` }, + params, + }) + + return respData + .map((gitHubRepoData) => { + const { + pushed_at: updatedAt, + permissions, + name, + private: isPrivate, + } = gitHubRepoData + + return { + lastUpdated: updatedAt, + permissions, + repoName: name, + isPrivate, + } as RepositoryData + }) + .filter( + (repoData) => + repoData.permissions.push === true && + !ISOMER_ADMIN_REPOS.includes(repoData.repoName) + ) + }) + ) + + const flattenedAllSites = _.flatten(allSites) + // Github users are using their own access token, which already filters sites to only those they have write access to + // Admin users should have access to all sites regardless + if (isAdminUser || !isEmailUser) return flattenedAllSites + + // Email users need to have the list of sites filtered to those they have access to in our db, since our centralised token returns all sites + const retrievedSitesByEmail = await this.getSitesForEmailUser(userId) + + return flattenedAllSites.filter((repoData) => + retrievedSitesByEmail.includes(repoData.repoName) + ) + } + + async checkHasAccessForGitHubUser(sessionData: UserWithSiteSessionData) { + await this.gitHubService.checkHasAccess(sessionData) + } + + async getLastUpdated(sessionData: UserWithSiteSessionData): Promise { + const { pushed_at: updatedAt } = await this.gitHubService.getRepoInfo( + sessionData + ) + return updatedAt + } + + async getStagingUrl( + sessionData: UserWithSiteSessionData + ): Promise { + const siteUrls = await this.getUrlsOfSite(sessionData) + if (siteUrls instanceof NotFoundError) { + return new NotFoundError( + `${sessionData.siteName} does not have a staging url` + ) + } + + const { staging } = siteUrls + + return staging + } + + async getSiteUrl( + 
sessionData: UserWithSiteSessionData + ): Promise { + const siteUrls = await this.getUrlsOfSite(sessionData) + if (siteUrls instanceof NotFoundError) { + return new NotFoundError( + `${sessionData.siteName} does not have a site url` + ) + } + + const { prod } = siteUrls + + return prod + } + async create( createParams: Partial & { name: Site["name"] @@ -36,24 +403,59 @@ class SitesService { creator: Site["creator"] } ) { - return this.repository.create(createParams) + return this.siteRepository.create(createParams) } async update(updateParams: Partial & { id: Site["id"] }) { - return this.repository.update(updateParams, { + return this.siteRepository.update(updateParams, { where: { id: updateParams.id }, }) } - async getSiteAccessToken(siteName: string) { - const site = await this.getBySiteName(siteName) + async getSiteInfo( + sessionData: UserWithSiteSessionData + ): Promise { + const siteUrls = await this.getUrlsOfSite(sessionData) + if (siteUrls instanceof NotFoundError) { + return new UnprocessableError("Unable to retrieve site info") + } + const { staging: stagingUrl, prod: prodUrl } = siteUrls - if (!site) { - return null + const stagingCommit = await this.gitHubService.getLatestCommitOfBranch( + sessionData, + "staging" + ) + + const prodCommit = await this.gitHubService.getLatestCommitOfBranch( + sessionData, + "master" + ) + + if ( + !this.isGitHubCommitData(stagingCommit) || + !this.isGitHubCommitData(prodCommit) + ) { + return new UnprocessableError("Unable to retrieve GitHub commit info") } - const token = await this.tokenStore.getToken(site.apiTokenName) - return token + const { + author: { date: stagingDate }, + } = stagingCommit + const { + author: { date: prodDate }, + } = prodCommit + + const stagingAuthor = await this.getCommitAuthorEmail(stagingCommit) + const prodAuthor = await this.getMergeAuthorEmail(prodCommit, sessionData) + + return { + savedAt: new Date(stagingDate).getTime() || 0, + savedBy: stagingAuthor || "Unknown Author", + publishedAt: new Date(prodDate).getTime() || 0, + publishedBy: prodAuthor || "Unknown Author", + stagingUrl: stagingUrl || "", + siteUrl: prodUrl || "", + } } } diff --git a/src/services/identity/SmsClient.ts b/src/services/identity/SmsClient.ts index b70f5eff0..edebaedc1 100644 --- a/src/services/identity/SmsClient.ts +++ b/src/services/identity/SmsClient.ts @@ -1,10 +1,12 @@ import axios from "axios" +import { config } from "@config/config" + import logger from "@logger/logger" import { AxiosClient } from "@root/types" -const { POSTMAN_SMS_CRED_NAME } = process.env +const POSTMAN_SMS_CRED_NAME = config.get("postman.smsCredName") const POSTMAN_API_URL = "https://api.postman.gov.sg/v1" @@ -12,10 +14,7 @@ class SmsClient { private readonly axiosClient: AxiosClient constructor() { - const { POSTMAN_API_KEY } = process.env - - if (!POSTMAN_API_KEY) - throw new Error("Postman.gov.sg API key cannot be empty.") + const POSTMAN_API_KEY = config.get("postman.apiKey") this.axiosClient = axios.create({ baseURL: POSTMAN_API_URL, @@ -36,7 +35,7 @@ class SmsClient { try { await this.axiosClient.post(endpoint, sms) } catch (err) { - logger.error(err) + logger.error(`Failed to send SMS to ${recipient}: ${err}`) throw new Error("Failed to send SMS.") } } diff --git a/src/services/identity/TokenStore.ts b/src/services/identity/TokenStore.ts deleted file mode 100644 index 8ce3e2e20..000000000 --- a/src/services/identity/TokenStore.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { - SecretsManagerClient, - GetSecretValueCommand, - SecretsManagerClientConfig, -} 
from "@aws-sdk/client-secrets-manager" - -class TokenStore { - private readonly secretsClient: SecretsManagerClient - - constructor() { - this.secretsClient = this.createClient() - } - - private createClient() { - const { AWS_REGION, AWS_ENDPOINT } = process.env - const config: SecretsManagerClientConfig = { - region: AWS_REGION || "ap-southeast-1", - } - - // Use an alternate AWS endpoint if provided. For testing with localstack - if (AWS_ENDPOINT) config.endpoint = AWS_ENDPOINT - - return new SecretsManagerClient(config) - } - - // NOTE: This is currently stricter than required. - // We can relax the constraint so that it can be undefined in the future. - async getToken(apiTokenName: string): Promise { - const command = new GetSecretValueCommand({ - SecretId: apiTokenName, - }) - const { SecretString: apiToken } = await this.secretsClient.send(command) - return apiToken - } -} - -export default TokenStore diff --git a/src/services/identity/UsersService.ts b/src/services/identity/UsersService.ts index b5438e245..c0c479cf6 100644 --- a/src/services/identity/UsersService.ts +++ b/src/services/identity/UsersService.ts @@ -2,24 +2,37 @@ import { Op, ModelStatic } from "sequelize" import { Sequelize } from "sequelize-typescript" import { RequireAtLeastOne } from "type-fest" -import { User, Whitelist } from "@database/models" +import { config } from "@config/config" + +import { Otp, Repo, Site, User, Whitelist, SiteMember } from "@database/models" +import { BadRequestError } from "@root/errors/BadRequestError" +import { milliSecondsToMinutes } from "@root/utils/time-utils" import SmsClient from "@services/identity/SmsClient" -import TotpGenerator from "@services/identity/TotpGenerator" import MailClient from "@services/utilServices/MailClient" +import OtpService from "./OtpService" + +const OTP_EXPIRY = config.get("auth.otpExpiry") +const MAX_NUM_OTP_ATTEMPTS = config.get("auth.maxNumOtpAttempts") + +enum OtpType { + Email = "EMAIL", + Mobile = "MOBILE", +} + interface UsersServiceProps { - otp: TotpGenerator mailer: MailClient smsClient: SmsClient repository: ModelStatic sequelize: Sequelize whitelist: ModelStatic + otpService: OtpService + otpRepository: ModelStatic } class UsersService { // NOTE: Explicitly specifying using keyed properties to ensure // that the types are synced. 
- private readonly otp: UsersServiceProps["otp"] private readonly mailer: UsersServiceProps["mailer"] @@ -31,20 +44,30 @@ class UsersService { private readonly whitelist: UsersServiceProps["whitelist"] + private readonly otpService: UsersServiceProps["otpService"] + + private readonly otpRepository: UsersServiceProps["otpRepository"] + constructor({ - otp, mailer, smsClient, repository, sequelize, whitelist, + otpService, + otpRepository, }: UsersServiceProps) { - this.otp = otp this.mailer = mailer this.smsClient = smsClient this.repository = repository this.sequelize = sequelize this.whitelist = whitelist + this.otpService = otpService + this.otpRepository = otpRepository + } + + async findById(id: string) { + return this.repository.findOne({ where: { id } }) } async findByEmail(email: string) { @@ -55,6 +78,72 @@ class UsersService { return this.repository.findOne({ where: { githubId } }) } + async getSiteMember(userId: string, siteName: string): Promise { + return this.repository.findOne({ + where: { id: userId }, + include: [ + { + model: Site, + as: "site_members", + required: true, + include: [ + { + model: Repo, + required: true, + where: { + name: siteName, + }, + }, + ], + }, + ], + }) + } + + async getSiteAdmin(userId: string, siteName: string) { + return this.repository.findOne({ + where: { id: userId, role: "ADMIN" }, + include: [ + { + model: SiteMember, + as: "site_members", + required: true, + include: [ + { + model: Repo, + required: true, + where: { + name: siteName, + }, + }, + ], + }, + ], + }) + } + + async findSitesByUserId( + isomerId: string + ): Promise< + User & { site_members: Array } + > { + // NOTE: The type casting is necessary to allow site_members to be + // safely read + return this.repository.findOne({ + where: { id: isomerId }, + include: [ + { + model: Site, + as: "site_members", + required: true, + include: [{ model: Repo, required: true }], + }, + ], + }) as Promise< + User & { site_members: Array } + > + } + async updateUserByGitHubId( githubId: string, // NOTE: This ensures that the caller passes in at least 1 property of User @@ -63,6 +152,21 @@ class UsersService { await this.repository.update(user, { where: { githubId } }) } + async updateUserByIsomerId( + isomerId: string, + // NOTE: This ensures that the caller passes in at least 1 property of User + user: RequireAtLeastOne + ) { + await this.repository.update(user, { where: { id: isomerId } }) + } + + async findOrCreate(githubId: string | undefined) { + const [user] = await this.repository.findOrCreate({ + where: { githubId }, + }) + return user + } + async login(githubId: string): Promise { return this.sequelize.transaction(async (transaction) => { // NOTE: The service's findOrCreate is not being used here as this requires an explicit transaction @@ -83,6 +187,18 @@ class UsersService { }) } + async loginWithEmail(email: string): Promise { + return this.sequelize.transaction(async (transaction) => { + // NOTE: The service's findOrCreate is not being used here as this requires an explicit transaction + const [user] = await this.repository.findOrCreate({ + where: { email }, + transaction, + }) + user.lastLoggedIn = new Date() + return user.save({ transaction }) + }) + } + async canSendEmailOtp(email: string) { const whitelistEntries = await this.whitelist.findAll({ attributes: ["email"], @@ -99,26 +215,120 @@ class UsersService { } async sendEmailOtp(email: string) { - const otp = this.otp.generate(email) - const expiry = this.otp.getExpiryMinutes() + const { otp, hashedOtp } = await 
this.otpService.generateLoginOtpWithHash() + + // Reset attempts to login + const otpEntry = await this.otpRepository.findOne({ where: { email } }) + if (!otpEntry) { + // create new entry + await this.createOtpEntry(email, OtpType.Email, hashedOtp) + } else { + await otpEntry?.update({ + hashedOtp, + attempts: 0, + expiresAt: this.getOtpExpiry(), + }) + } const subject = "One-Time Password (OTP) for IsomerCMS" - const html = `
<p>Your OTP is ${otp}. It will expire in ${expiry} minutes. Please use this to verify your email address.</p>
+ const html = `<p>Your OTP is ${otp}. It will expire in ${milliSecondsToMinutes( + OTP_EXPIRY + )} minutes. Please use this to verify your email address.</p>
<p>If your OTP does not work, please request for a new OTP.</p>
<p>IsomerCMS Support Team</p>
` await this.mailer.sendMail(email, subject, html) } async sendSmsOtp(mobileNumber: string) { - const otp = this.otp.generate(mobileNumber) - const expiry = this.otp.getExpiryMinutes() + const { otp, hashedOtp } = await this.otpService.generateLoginOtpWithHash() + + // Reset attempts to login + const otpEntry = await this.otpRepository.findOne({ + where: { mobileNumber }, + }) + if (!otpEntry) { + await this.createOtpEntry(mobileNumber, OtpType.Mobile, hashedOtp) + } else { + await otpEntry?.update({ hashedOtp, attempts: 0 }) + } - const message = `Your OTP is ${otp}. It will expire in ${expiry} minutes. Please use this to verify your mobile number` + const message = `Your OTP is ${otp}. It will expire in ${milliSecondsToMinutes( + OTP_EXPIRY + )} minutes. Please use this to verify your mobile number` await this.smsClient.sendSms(mobileNumber, message) } - verifyOtp(value: string, otp: string) { - return this.otp.verify(value, otp) + private async verifyOtp(otpEntry: Otp | null, otp: string) { + // TODO: Change all the following to use AuthError after FE fix + if (!otp || otp === "") { + throw new BadRequestError("Empty OTP provided") + } + + if (!otpEntry) { + throw new BadRequestError("OTP not found") + } + + if (otpEntry.attempts >= MAX_NUM_OTP_ATTEMPTS) { + throw new BadRequestError("Max number of attempts reached") + } + + if (!otpEntry?.hashedOtp) { + await otpEntry.destroy() + throw new BadRequestError("Hashed OTP not found") + } + + // increment attempts + await otpEntry.update({ attempts: otpEntry.attempts + 1 }) + + const isValidOtp = await this.otpService.verifyOtp(otp, otpEntry.hashedOtp) + if (!isValidOtp) { + throw new BadRequestError("OTP is not valid") + } + + if (isValidOtp && otpEntry.expiresAt < new Date()) { + await otpEntry.destroy() + throw new BadRequestError("OTP has expired") + } + + // destroy otp before returning true since otp has been "used" + await otpEntry.destroy() + return true + } + + async verifyEmailOtp(email: string, otp: string) { + const otpEntry = await this.otpRepository.findOne({ where: { email } }) + return this.verifyOtp(otpEntry, otp) + } + + async verifyMobileOtp(mobileNumber: string, otp: string) { + const otpEntry = await this.otpRepository.findOne({ + where: { mobileNumber }, + }) + return this.verifyOtp(otpEntry, otp) + } + + private getOtpExpiry() { + return new Date(Date.now() + OTP_EXPIRY) + } + + private async createOtpEntry( + key: string, + keyType: OtpType, + hashedOtp: string + ) { + if (keyType === OtpType.Email) { + await this.otpRepository.create({ + email: key, + hashedOtp, + expiresAt: this.getOtpExpiry(), + }) + } else { + await this.otpRepository.create({ + mobileNumber: key, + hashedOtp, + expiresAt: this.getOtpExpiry(), + }) + } } } diff --git a/src/services/identity/__tests__/AuthService.spec.ts b/src/services/identity/__tests__/AuthService.spec.ts index 01a8a5101..8c545f9fd 100644 --- a/src/services/identity/__tests__/AuthService.spec.ts +++ b/src/services/identity/__tests__/AuthService.spec.ts @@ -2,6 +2,7 @@ import { BadRequestError } from "@errors/BadRequestError" import { NotFoundError } from "@errors/NotFoundError" import { mockAccessToken, mockSiteName, mockUserId } from "@fixtures/identity" +import { mockUserWithSiteSessionData } from "@fixtures/sessionData" import { GitHubService } from "@services/db/GitHubService" import _AuthService from "../AuthService" @@ -25,16 +26,13 @@ describe("Auth Service", () => { // Act const actual = await AuthService.hasAccessToSite( - mockSiteName, - mockUserId, - mockAccessToken + 
mockUserWithSiteSessionData ) // Assert expect(actual).toBe(expected) expect(mockGitHubService.checkHasAccess).toHaveBeenCalledWith( - mockReqDetails, - mockParams + mockUserWithSiteSessionData ) }) @@ -47,16 +45,13 @@ describe("Auth Service", () => { // Act const actual = await AuthService.hasAccessToSite( - mockSiteName, - mockUserId, - mockAccessToken + mockUserWithSiteSessionData ) // Assert expect(actual).toBe(expected) expect(mockGitHubService.checkHasAccess).toHaveBeenCalledWith( - mockReqDetails, - mockParams + mockUserWithSiteSessionData ) }) @@ -70,17 +65,12 @@ describe("Auth Service", () => { ) // Act - const actual = AuthService.hasAccessToSite( - mockSiteName, - mockUserId, - mockAccessToken - ) + const actual = AuthService.hasAccessToSite(mockUserWithSiteSessionData) // Assert await expect(actual).rejects.toThrow(BadRequestError) expect(mockGitHubService.checkHasAccess).toHaveBeenCalledWith( - mockReqDetails, - mockParams + mockUserWithSiteSessionData ) }) }) diff --git a/src/services/identity/__tests__/CollaboratorsService.spec.ts b/src/services/identity/__tests__/CollaboratorsService.spec.ts new file mode 100644 index 000000000..925aee7ff --- /dev/null +++ b/src/services/identity/__tests__/CollaboratorsService.spec.ts @@ -0,0 +1,545 @@ +import { ModelStatic } from "sequelize" + +import { ForbiddenError } from "@errors/ForbiddenError" +import { NotFoundError } from "@errors/NotFoundError" +import { UnprocessableError } from "@errors/UnprocessableError" + +import { Site, SiteMember, User, Whitelist } from "@database/models" +import { + expectedSortedMockCollaboratorsList, + mockSiteOrmResponseWithAllCollaborators, + mockSiteOrmResponseWithOneAdminCollaborator, + mockSiteOrmResponseWithOneContributorCollaborator, + mockSiteOrmResponseWithNoCollaborators, +} from "@fixtures/identity" +import { + CollaboratorRoles, + INACTIVE_USER_THRESHOLD_DAYS, +} from "@root/constants" +import { BadRequestError } from "@root/errors/BadRequestError" +import { ConflictError } from "@root/errors/ConflictError" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import SitesService from "@services/identity/SitesService" +import UsersService from "@services/identity/UsersService" + +describe("CollaboratorsService", () => { + const mockSiteName = "sitename" + const mockEmailAddress = "test1@test.gov.sg" + const mockSiteId = 1 + const mockUserId = "2" + const mockWhitelistId = 3 + const mockSiteRepo = { + findOne: jest.fn(), + } + const mockSiteMemberRepo = { + destroy: jest.fn(), + findOne: jest.fn(), + create: jest.fn(), + } + const mockWhitelistRepo = { + findAll: jest.fn(), + } + + const mockSitesService = { + getBySiteName: jest.fn(), + } + const mockUsersService = { + findByEmail: jest.fn(), + } + + const collaboratorsService = new CollaboratorsService({ + siteRepository: (mockSiteRepo as unknown) as ModelStatic, + siteMemberRepository: (mockSiteMemberRepo as unknown) as ModelStatic, + sitesService: (mockSitesService as unknown) as SitesService, + usersService: (mockUsersService as unknown) as UsersService, + whitelist: (mockWhitelistRepo as unknown) as ModelStatic, + }) + + // Prevent inter-test pollution of mocks + afterEach(() => jest.clearAllMocks()) + + describe("deriveAllowedRoleFromEmail", () => { + it("should derive admin role for valid admin-eligible emails", async () => { + // Arrange + const mockWhitelistEntries = [ + { + id: mockWhitelistId, + email: mockEmailAddress, + expiry: null, + createdAt: new Date(), + updatedAt: new Date(), + }, + ] + 
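// [Editor's note, not part of the patch] This fixture drives
// CollaboratorsService.deriveAllowedRoleFromEmail, defined earlier in this
// patch: a matching whitelist entry with `expiry: null` yields
// CollaboratorRoles.Admin, a matching entry with a future expiry yields
// CollaboratorRoles.Contributor, and no matching (unexpired) entry yields
// null. The match itself is a suffix check, i.e.
// fullEmail.endsWith(entry.email), so whitelist rows can hold either full
// addresses or domain suffixes.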
mockWhitelistRepo.findAll.mockResolvedValue( + (mockWhitelistEntries as unknown) as Whitelist[] + ) + + // Act + const role = await collaboratorsService.deriveAllowedRoleFromEmail( + mockEmailAddress + ) + + // Assert + expect(role).toStrictEqual(CollaboratorRoles.Admin) + expect(mockWhitelistRepo.findAll).toHaveBeenCalled() + }) + + it("should derive contributor role for valid contributor-eligible emails", async () => { + // Arrange + const mockWhitelistEntries = [ + { + id: mockWhitelistId, + email: mockEmailAddress, + expiry: new Date(), + createdAt: new Date(), + updatedAt: new Date(), + }, + ] + mockWhitelistRepo.findAll.mockResolvedValue( + (mockWhitelistEntries as unknown) as Whitelist[] + ) + + // Act + const role = await collaboratorsService.deriveAllowedRoleFromEmail( + mockEmailAddress + ) + + // Assert + expect(role).toStrictEqual(CollaboratorRoles.Contributor) + expect(mockWhitelistRepo.findAll).toHaveBeenCalled() + }) + + it("should derive no role for emails from non-whitelisted domains", async () => { + // Arrange + const mockWhitelistEntries: never[] = [] + mockWhitelistRepo.findAll.mockResolvedValue( + mockWhitelistEntries as Whitelist[] + ) + + // Act + const role = await collaboratorsService.deriveAllowedRoleFromEmail( + mockEmailAddress + ) + + // Assert + expect(role).toStrictEqual(null) + expect(mockWhitelistRepo.findAll).toHaveBeenCalled() + }) + }) + + describe("list", () => { + it("should list all collaborators in the correct sequence", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithAllCollaborators + ) + + // Act + const collaborators = await collaboratorsService.list( + mockSiteName, + mockEmailAddress + ) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(collaborators).toStrictEqual(expectedSortedMockCollaboratorsList) + }) + + it("should return empty array if no collaborators are found", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithNoCollaborators + ) + + // Act + const collaborators = await collaboratorsService.list(mockSiteName) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(collaborators).toStrictEqual([]) + }) + + it("should return empty array if no site with the specified id is found", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue([]) + + // Act + const collaborators = await collaboratorsService.list(mockSiteName) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(collaborators).toStrictEqual([]) + }) + }) + + describe("getRole", () => { + it("should retrieve correct admin role", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithOneAdminCollaborator + ) + + // Act + const role = await collaboratorsService.getRole(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(role).toStrictEqual(CollaboratorRoles.Admin) + }) + + it("should retrieve correct contributor role", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithOneContributorCollaborator + ) + + // Act + const role = await collaboratorsService.getRole(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(role).toStrictEqual(CollaboratorRoles.Contributor) + }) + + it("should retrieve correct null role if site has no collaborators", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithNoCollaborators + ) 
+ + // Act + const role = await collaboratorsService.getRole(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(role).toStrictEqual(null) + }) + + it("should retrieve correct null role if site does not exist", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue([]) + + // Act + const role = await collaboratorsService.getRole(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(role).toStrictEqual(null) + }) + }) + + describe("delete", () => { + it("should delete contributor", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithAllCollaborators + ) + + // Act + await collaboratorsService.delete(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(mockSiteMemberRepo.destroy).toHaveBeenCalled() + }) + + it("should throw error if user is not a member of the site", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithNoCollaborators + ) + + // Act + const resp = await collaboratorsService.delete(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(mockSiteMemberRepo.destroy).not.toHaveBeenCalled() + expect(resp instanceof NotFoundError).toBe(true) + }) + + it("should not delete admin if there is only one admin left", async () => { + // Arrange + mockSiteRepo.findOne.mockResolvedValue( + mockSiteOrmResponseWithOneAdminCollaborator + ) + + // Act + const resp = await collaboratorsService.delete(mockSiteName, mockUserId) + + // Assert + expect(mockSiteRepo.findOne).toHaveBeenCalled() + expect(mockSiteMemberRepo.destroy).not.toHaveBeenCalled() + expect(resp instanceof UnprocessableError).toBe(true) + }) + }) + + describe("create", () => { + const mockSiteMemberRecord = { + siteId: mockSiteId, + userId: mockUserId, + role: CollaboratorRoles.Contributor, + } + + it("should create contributor", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Admin + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + true + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).toBeCalledWith(mockEmailAddress) + expect(mockSiteMemberRepo.findOne).toBeCalled() + expect(mockSiteMemberRepo.create).toBeCalled() + expect(resp).toStrictEqual(mockSiteMemberRecord) + }) + + it("should return error if email is malformed", async () => { + // Arrange + const MALFORMED_EMAIL = "test" + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Admin + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + MALFORMED_EMAIL, + false + ) + + // Assert + 
expect(collaboratorsService.deriveAllowedRoleFromEmail).not.toBeCalled() + expect(mockSitesService.getBySiteName).not.toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).not.toBeCalledWith(mockEmailAddress) + expect(mockSiteMemberRepo.findOne).not.toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof BadRequestError).toBe(true) + }) + + it("should return error if email domain is not whitelisted", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => null + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + false + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).not.toBeCalled() + expect(mockUsersService.findByEmail).not.toBeCalled() + expect(mockSiteMemberRepo.findOne).not.toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof ForbiddenError).toBe(true) + }) + + it("should return error if site does not exist", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Admin + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue(null) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + false + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).not.toBeCalled() + expect(mockSiteMemberRepo.findOne).not.toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof NotFoundError).toBe(true) + }) + + it("should return error if user does not exist", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Admin + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue(null) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + false + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).toBeCalledWith(mockEmailAddress) + expect(mockSiteMemberRepo.findOne).not.toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof NotFoundError).toBe(true) + }) + + it("should return error if user already is a site member", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Admin + ) as unknown) as () => Promise + 
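// [Editor's note, not part of the patch] These create() tests mirror the
// validation order in CollaboratorsService.create, which returns its failures
// as values instead of throwing: malformed email -> BadRequestError, email not
// whitelisted -> ForbiddenError, missing site or user -> NotFoundError,
// existing member -> ConflictError, contributor without acknowledgement ->
// UnprocessableError. Only after all checks pass is the SiteMember row
// created, so a caller is expected to branch on the return value, e.g.
// (hypothetical, assuming the error classes extend Error):
//
//   const result = await collaboratorsService.create(siteName, email, true)
//   if (result instanceof Error) {
//     // surface the specific error to the client
//   } else {
//     // result is the newly created SiteMember record
//   }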
mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(mockSiteMemberRecord) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + false + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).toBeCalledWith(mockEmailAddress) + expect(mockSiteMemberRepo.findOne).toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof ConflictError).toBe(true) + }) + + it("should return error if acknowledgement is not done and if the user is going to be a contributor", async () => { + // Arrange + collaboratorsService.deriveAllowedRoleFromEmail = (jest.fn( + () => CollaboratorRoles.Contributor + ) as unknown) as () => Promise + mockSitesService.getBySiteName.mockResolvedValue({ id: mockSiteId }) + mockUsersService.findByEmail.mockResolvedValue({ id: mockUserId }) + mockSiteMemberRepo.findOne.mockResolvedValue(null) + mockSiteMemberRepo.create.mockResolvedValue(mockSiteMemberRecord) + + // Act + const resp = await collaboratorsService.create( + mockSiteName, + mockEmailAddress, + false + ) + + // Assert + expect(collaboratorsService.deriveAllowedRoleFromEmail).toBeCalledWith( + mockEmailAddress + ) + expect(mockSitesService.getBySiteName).toBeCalledWith(mockSiteName) + expect(mockUsersService.findByEmail).toBeCalledWith(mockEmailAddress) + expect(mockSiteMemberRepo.findOne).toBeCalled() + expect(mockSiteMemberRepo.create).not.toBeCalled() + expect(resp instanceof UnprocessableError).toBe(true) + }) + }) + + describe("getStatistics", () => { + const inactiveDate = new Date() + inactiveDate.setDate( + inactiveDate.getDate() - INACTIVE_USER_THRESHOLD_DAYS - 1 + ) + const mockActiveCollaborator: Partial = { + lastLoggedIn: new Date(), + } + const mockInactiveCollaborator: Partial = { + lastLoggedIn: inactiveDate, + } + + it("should return non-zero collaborators statistics", async () => { + // Arrange + const expected = { + total: 2, + inactive: 1, + } + mockSiteRepo.findOne.mockResolvedValue({ + site_members: [mockActiveCollaborator, mockInactiveCollaborator], + }) + + // Act + const actual = await collaboratorsService.getStatistics(mockSiteName) + + // Assert + expect(actual).toEqual(expected) + expect(mockSiteRepo.findOne).toBeCalled() + }) + + it("should return zero inactive collaborators statistics if there is none", async () => { + // Arrange + const expected = { + total: 1, + inactive: 0, + } + mockSiteRepo.findOne.mockResolvedValue({ + site_members: [mockActiveCollaborator], + }) + + // Act + const actual = await collaboratorsService.getStatistics(mockSiteName) + + // Assert + expect(actual).toEqual(expected) + expect(mockSiteRepo.findOne).toBeCalled() + }) + + it("should return NotFoundError if site is not found", async () => { + // Arrange + const expected = { + total: 0, + inactive: 0, + } + mockSiteRepo.findOne.mockResolvedValue(null) + + // Act + await expect( + collaboratorsService.getStatistics(mockSiteName) + ).resolves.toBeInstanceOf(NotFoundError) + + // Assert + expect(mockSiteRepo.findOne).toBeCalled() + }) + }) +}) diff --git a/src/services/identity/__tests__/NotificationsService.spec.ts b/src/services/identity/__tests__/NotificationsService.spec.ts new 
file mode 100644
index 000000000..eb5c296a6
--- /dev/null
+++ b/src/services/identity/__tests__/NotificationsService.spec.ts
@@ -0,0 +1,211 @@
+import { ModelStatic } from "sequelize/types"
+
+import { Notification, SiteMember } from "@root/database/models"
+import { mockSiteName, mockUserId } from "@root/fixtures/identity"
+
+import _NotificationsService from "../NotificationsService"
+
+const MockRepository = {
+  findOne: jest.fn(),
+  findAll: jest.fn(),
+  update: jest.fn(),
+  create: jest.fn(),
+}
+const MockSiteMember = {
+  findOne: jest.fn(),
+}
+
+const NotificationsService = new _NotificationsService({
+  repository: (MockRepository as unknown) as ModelStatic<Notification>,
+  siteMember: (MockSiteMember as unknown) as ModelStatic<SiteMember>,
+})
+
+const mockNotifications = [
+  {
+    message: "one",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "two",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "three",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "four",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "five",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "six",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+  {
+    message: "seven",
+    createdAt: "2022-10-04 07:42:31.597857+00",
+    link: "link",
+    sourceUsername: "blah",
+    type: "type",
+    isRead: true,
+  },
+]
+
+const mockNotificationsResponse = mockNotifications.map((notification) => ({
+  ...notification,
+  firstReadTime: "yes",
+}))
+
+describe("Notification Service", () => {
+  afterEach(() => jest.clearAllMocks())
+
+  describe("listRecent", () => {
+    afterEach(() => jest.clearAllMocks())
+    it("should return the most recent 6 notifications by calling listRecent", async () => {
+      // Arrange
+      const expected = mockNotifications.slice(0, 6)
+
+      MockRepository.findAll.mockResolvedValueOnce([])
+      MockRepository.findAll.mockResolvedValueOnce(
+        mockNotificationsResponse.slice(0, 6)
+      )
+
+      // Act
+      const actual = NotificationsService.listRecent({
+        userId: mockUserId,
+        siteName: mockSiteName,
+      })
+
+      // Assert
+      await expect(actual).resolves.toStrictEqual(expected)
+      expect(MockRepository.findAll).toHaveBeenCalledTimes(2)
+    })
+
+    it("should return the result directly if new notifications available", async () => {
+      // Arrange
+      const expected = mockNotifications.slice(0, 2)
+      MockRepository.findAll.mockResolvedValueOnce(
+        mockNotificationsResponse.slice(0, 2)
+      )
+
+      // Act
+      const actual = NotificationsService.listRecent({
+        userId: mockUserId,
+        siteName: mockSiteName,
+      })
+
+      // Assert
+      await expect(actual).resolves.toStrictEqual(expected)
+      expect(MockRepository.findAll).toHaveBeenCalledTimes(1)
+    })
+  })
+
+  it("should return all notifications with listAll", async () => {
+    // Arrange
+    const expected = mockNotifications
+    MockRepository.findAll.mockResolvedValueOnce(mockNotificationsResponse)
+
+    // Act
+    const actual = NotificationsService.listAll({
+      userId: mockUserId,
+      siteName: mockSiteName,
+    })
+
+    // Assert
+    await expect(actual).resolves.toStrictEqual(expected)
+
expect(MockRepository.findAll).toHaveBeenCalledTimes(1) + }) + + it("should update all notifications with markNotificationsAsRead", async () => { + // Arrange + MockSiteMember.findOne.mockResolvedValueOnce({ id: mockUserId }) + MockRepository.update.mockResolvedValueOnce({}) + + // Act + const actual = NotificationsService.markNotificationsAsRead({ + userId: mockUserId, + siteName: mockSiteName, + }) + + // Assert + await expect(actual).resolves.not.toThrow() + expect(MockSiteMember.findOne).toHaveBeenCalledTimes(1) + expect(MockRepository.update).toHaveBeenCalledTimes(1) + }) + + describe("create", () => { + const mockSiteMember = ({ + userId: mockUserId, + siteId: 1, + } as unknown) as SiteMember + it("should create a new notification if no similar one exists", async () => { + // Arrange + MockSiteMember.findOne.mockResolvedValueOnce({ id: mockUserId }) + MockRepository.findOne.mockResolvedValueOnce(null) + + // Act + const actual = NotificationsService.create({ + siteMember: mockSiteMember, + link: "link", + notificationType: "sent_request", + notificationSourceUsername: "user", + }) + + // Assert + await expect(actual).resolves.not.toThrow() + expect(MockRepository.findOne).toHaveBeenCalledTimes(1) + expect(MockRepository.create).toHaveBeenCalledTimes(1) + }) + + it("should update an existing notification if a similar one exists", async () => { + // Arrange + const notificationUpdate = jest.fn() + MockSiteMember.findOne.mockResolvedValueOnce({ id: mockUserId }) + MockRepository.findOne.mockResolvedValueOnce({ + update: notificationUpdate, + changed: jest.fn(), + }) + + // Act + const actual = NotificationsService.create({ + siteMember: mockSiteMember, + link: "link", + notificationType: "sent_request", + notificationSourceUsername: "user", + }) + + // Assert + await expect(actual).resolves.not.toThrow() + expect(MockRepository.findOne).toHaveBeenCalledTimes(1) + expect(notificationUpdate).toHaveBeenCalledTimes(1) + }) + }) +}) diff --git a/src/services/identity/__tests__/SitesService.spec.ts b/src/services/identity/__tests__/SitesService.spec.ts index a4a10f044..458f27ac2 100644 --- a/src/services/identity/__tests__/SitesService.spec.ts +++ b/src/services/identity/__tests__/SitesService.spec.ts @@ -1,24 +1,95 @@ import { ModelStatic } from "sequelize" -import { Site } from "@database/models" +import { config } from "@config/config" -import _SitesService from "../SitesService" -import TokenStore from "../TokenStore" +import { Deployment, Repo, Site, User } from "@database/models" +import { + MOCK_COMMIT_MESSAGE_OBJECT_ONE, + MOCK_COMMIT_MESSAGE_OBJECT_TWO, + MOCK_GITHUB_NAME_ONE, + MOCK_GITHUB_NAME_TWO, + MOCK_GITHUB_EMAIL_ADDRESS_ONE, + MOCK_GITHUB_EMAIL_ADDRESS_TWO, + MOCK_GITHUB_DATE_ONE, + MOCK_GITHUB_DATE_TWO, + MOCK_COMMIT_MESSAGE_ONE, + MOCK_COMMIT_MESSAGE_TWO, + MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME, +} from "@fixtures/identity" +import { + repoInfo, + repoInfo2, + adminRepo, + noAccessRepo, + MOCK_STAGING_URL_CONFIGYML, + MOCK_PRODUCTION_URL_CONFIGYML, + MOCK_PRODUCTION_URL_DB, + MOCK_STAGING_URL_DB, + MOCK_STAGING_URL_GITHUB, + MOCK_PRODUCTION_URL_GITHUB, +} from "@fixtures/repoInfo" +import { + mockUserWithSiteSessionData, + mockSessionDataEmailUser, + mockIsomerUserId, + mockEmail, + mockSessionDataEmailUserWithSite, +} from "@fixtures/sessionData" +import mockAxios from "@mocks/axios" +import { NotFoundError } from "@root/errors/NotFoundError" +import RequestNotFoundError from "@root/errors/RequestNotFoundError" +import { UnprocessableError } from 
"@root/errors/UnprocessableError" +import ReviewRequestService from "@root/services/review/ReviewRequestService" +import { GitHubCommitData } from "@root/types/commitData" +import { ConfigYmlData } from "@root/types/configYml" +import type { RepositoryData, SiteUrls } from "@root/types/repoInfo" +import { SiteInfo } from "@root/types/siteInfo" +import { GitHubService } from "@services/db/GitHubService" +import { ConfigYmlService } from "@services/fileServices/YmlFileServices/ConfigYmlService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import _SitesService from "@services/identity/SitesService" +import UsersService from "@services/identity/UsersService" const MockRepository = { findOne: jest.fn(), } -const MockTokenStore = { - getToken: jest.fn(), +const MockGithubService = { + checkHasAccess: jest.fn(), + getLatestCommitOfBranch: jest.fn(), + getRepoInfo: jest.fn(), +} + +const MockConfigYmlService = { + read: jest.fn(), +} + +const MockUsersService = { + findById: jest.fn(), + findSitesByUserId: jest.fn(), +} + +const MockIsomerAdminsService = { + getByUserId: jest.fn(), +} + +const MockReviewRequestService = { + getLatestMergedReviewRequest: jest.fn(), } const SitesService = new _SitesService({ - repository: (MockRepository as unknown) as ModelStatic, - tokenStore: (MockTokenStore as unknown) as TokenStore, + siteRepository: (MockRepository as unknown) as ModelStatic, + gitHubService: (MockGithubService as unknown) as GitHubService, + configYmlService: (MockConfigYmlService as unknown) as ConfigYmlService, + usersService: (MockUsersService as unknown) as UsersService, + isomerAdminsService: (MockIsomerAdminsService as unknown) as IsomerAdminsService, + reviewRequestService: (MockReviewRequestService as unknown) as ReviewRequestService, }) -const mockSiteToken = "token black" +const SpySitesService = { + extractAuthorEmail: jest.spyOn(SitesService, "extractAuthorEmail"), +} + const mockSiteName = "some site name" const mockSite = ({ name: "i m a site", @@ -30,49 +101,1275 @@ describe("SitesService", () => { // Prevent inter-test pollution of mocks afterEach(() => jest.clearAllMocks()) - it("should call the findOne method of the db model to get the siteName", async () => { - // Arrange - const expected = mockSite - MockRepository.findOne.mockResolvedValue(mockSite) + describe("extractAuthorEmail", () => { + it("should return the email address of the author of the commit", () => { + // Arrange + const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE + const commitData: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } - // Act - const actual = await SitesService.getBySiteName(mockSiteName) + // Act + const actual = SitesService.extractAuthorEmail(commitData) - // Assert - expect(actual).toBe(expected) - expect(MockRepository.findOne).toBeCalledWith({ - where: { - name: mockSiteName, - }, + // Assert + expect(actual).toEqual(expected) + }) + }) + + describe("insertUrlsFromConfigYml", () => { + it("should insert URLs if both are not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: MOCK_PRODUCTION_URL_CONFIGYML, + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const configYmlResponse = { + content: { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: MOCK_PRODUCTION_URL_CONFIGYML, + }, + sha: "abc", + } + 
MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + + it("should only insert staging URL if it is not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: MOCK_PRODUCTION_URL_DB, + } + const initial: SiteUrls = { + staging: "", + prod: MOCK_PRODUCTION_URL_DB, + } + const configYmlResponse = { + content: { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: MOCK_PRODUCTION_URL_CONFIGYML, + }, + sha: "abc", + } + MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + + it("should only insert production URL if it is not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_CONFIGYML, + } + const initial: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: "", + } + const configYmlResponse = { + content: { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: MOCK_PRODUCTION_URL_CONFIGYML, + }, + sha: "abc", + } + MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + + it("should not insert URLs if both are already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_DB, + } + const initial: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_DB, + } + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).not.toHaveBeenCalled() + }) + + it("should not insert staging URL if it does not exist in config.yml", async () => { + // Arrange + const expected: SiteUrls = { + staging: "", + prod: MOCK_PRODUCTION_URL_CONFIGYML, + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const configYmlResponse = { + content: { + prod: MOCK_PRODUCTION_URL_CONFIGYML, + }, + sha: "abc", + } + MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + + it("should not insert production URL if it does not exist in config.yml", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: "", + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const configYmlResponse = { + content: { + staging: MOCK_STAGING_URL_CONFIGYML, + }, + sha: "abc", + } + MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + 
expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + + it("should not insert URLs if config.yml does not contain both staging and production URLs", async () => { + // Arrange + const expected: SiteUrls = { + staging: "", + prod: "", + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const configYmlResponse = { + content: {}, + sha: "abc", + } + MockConfigYmlService.read.mockResolvedValueOnce(configYmlResponse) + + // Act + const actual = await SitesService.insertUrlsFromConfigYml( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockConfigYmlService.read).toHaveBeenCalled() + }) + }) + + describe("insertUrlsFromGitHubDescription", () => { + it("should insert URLs if both are not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_GITHUB, + prod: MOCK_PRODUCTION_URL_GITHUB, + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + MockGithubService.getRepoInfo.mockResolvedValueOnce(repoInfo) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should only insert staging URL if it is not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_GITHUB, + prod: MOCK_PRODUCTION_URL_DB, + } + const initial: SiteUrls = { + staging: "", + prod: MOCK_PRODUCTION_URL_DB, + } + MockGithubService.getRepoInfo.mockResolvedValueOnce(repoInfo) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should only insert production URL if it is not already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_GITHUB, + } + const initial: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: "", + } + MockGithubService.getRepoInfo.mockResolvedValueOnce(repoInfo) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should not insert URLs if both are already present", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_DB, + } + const initial: SiteUrls = { + staging: MOCK_STAGING_URL_DB, + prod: MOCK_PRODUCTION_URL_DB, + } + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).not.toHaveBeenCalled() + }) + + it("should not insert staging URL if it does not exist in the description", async () => { + // Arrange + const expected: SiteUrls = { + staging: "", + prod: MOCK_PRODUCTION_URL_CONFIGYML, + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const repoInfoWithoutStagingUrl = { + description: `Production: ${MOCK_PRODUCTION_URL_CONFIGYML}`, + } + MockGithubService.getRepoInfo.mockResolvedValueOnce( + repoInfoWithoutStagingUrl + ) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + 
mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should not insert production URL if it does not exist in the description", async () => { + // Arrange + const expected: SiteUrls = { + staging: MOCK_STAGING_URL_CONFIGYML, + prod: "", + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const repoInfoWithoutProductionUrl = { + description: `Staging: ${MOCK_STAGING_URL_CONFIGYML}`, + } + MockGithubService.getRepoInfo.mockResolvedValueOnce( + repoInfoWithoutProductionUrl + ) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should not insert URLs if description is empty", async () => { + // Arrange + const expected: SiteUrls = { + staging: "", + prod: "", + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const repoInfoWithoutDescription = { + description: "", + } + MockGithubService.getRepoInfo.mockResolvedValueOnce( + repoInfoWithoutDescription + ) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should not insert URLs if description is some gibberish", async () => { + // Arrange + const expected: SiteUrls = { + staging: "", + prod: "", + } + const initial: SiteUrls = { + staging: "", + prod: "", + } + const repoInfoWithGibberishDescription = { + description: "abcdefghijklmnopqrstuvwxyz-staging and-prod", + } + MockGithubService.getRepoInfo.mockResolvedValueOnce( + repoInfoWithGibberishDescription + ) + + // Act + const actual = await SitesService.insertUrlsFromGitHubDescription( + initial, + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + }) + + describe("getBySiteName", () => { + it("should call the findOne method of the db model to get the siteName", async () => { + // Arrange + const expected = mockSite + MockRepository.findOne.mockResolvedValueOnce(mockSite) + + // Act + const actual = await SitesService.getBySiteName(mockSiteName) + + // Assert + expect(actual).toBe(expected) + expect(MockRepository.findOne).toBeCalledWith({ + include: [ + { + model: Repo, + where: { + name: mockSiteName, + }, + }, + ], + }) + }) + }) + + describe("getSitesForEmailUser", () => { + it("should call the findSitesByUserId method of UsersService to get the sites for the user", async () => { + // Arrange + const expected = [mockSiteName] + const mockUserWithSites = { + site_members: [ + { + repo: { + name: mockSiteName, + }, + }, + ], + } + MockUsersService.findSitesByUserId.mockResolvedValueOnce( + mockUserWithSites + ) + + // Act + const actual = await SitesService.getSitesForEmailUser(mockIsomerUserId) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersService.findSitesByUserId).toBeCalledWith( + mockIsomerUserId + ) + }) + }) + + describe("getCommitAuthorEmail", () => { + it("should return the email of the commit author who is an email login user", async () => { + // Arrange + const expected = mockEmail + const commit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, 
+ message: JSON.stringify(MOCK_COMMIT_MESSAGE_OBJECT_ONE), + } + MockUsersService.findById.mockResolvedValueOnce(mockSessionDataEmailUser) + + // Act + const actual = await SitesService.getCommitAuthorEmail(commit) + + // Assert + expect(actual).toBe(expected) + expect(MockUsersService.findById).toHaveBeenCalledWith(mockIsomerUserId) + expect(SpySitesService.extractAuthorEmail).not.toHaveBeenCalled() + }) + + it("should return the email of the commit author who is a GitHub login user", async () => { + // Arrange + const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE + const commit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } + + // Act + const actual = await SitesService.getCommitAuthorEmail(commit) + + // Assert + expect(actual).toBe(expected) + expect(MockUsersService.findById).not.toHaveBeenCalled() + expect(SpySitesService.extractAuthorEmail).toHaveBeenCalled() + }) + }) + + describe("getMergeAuthorEmail", () => { + it("should return the email of the merge commit author if it was not performed using the common access token", async () => { + // Arrange + const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE + const commit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } + + // Act + const actual = await SitesService.getMergeAuthorEmail( + commit, + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect( + MockReviewRequestService.getLatestMergedReviewRequest + ).not.toHaveBeenCalled() + expect(SpySitesService.extractAuthorEmail).toHaveBeenCalled() + }) + + it("should return the email of the merge commit author if the site cannot be found", async () => { + // Arrange + const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE + const commit: GitHubCommitData = { + author: { + name: MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } + MockRepository.findOne.mockResolvedValueOnce(null) + + // Act + const actual = await SitesService.getMergeAuthorEmail( + commit, + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect( + MockReviewRequestService.getLatestMergedReviewRequest + ).not.toHaveBeenCalled() + expect(SpySitesService.extractAuthorEmail).toHaveBeenCalled() + }) + + it("should return the email of the merge commit author if there are no merged review requests", async () => { + // Arrange + const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE + const commit: GitHubCommitData = { + author: { + name: MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } + MockRepository.findOne.mockResolvedValueOnce(mockSite) + MockReviewRequestService.getLatestMergedReviewRequest.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const actual = await SitesService.getMergeAuthorEmail( + commit, + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect( + MockReviewRequestService.getLatestMergedReviewRequest + ).toHaveBeenCalledWith(mockSite) + expect(SpySitesService.extractAuthorEmail).toHaveBeenCalled() + }) + + it("should return the email of the requestor for the latest merged review request", async () => { + // Arrange + const expected = mockEmail + const 
commit: GitHubCommitData = {
+        author: {
+          name: MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME,
+          email: MOCK_GITHUB_EMAIL_ADDRESS_ONE,
+          date: MOCK_GITHUB_DATE_ONE,
+        },
+        message: MOCK_COMMIT_MESSAGE_ONE,
+      }
+      const mockReviewRequest = {
+        requestor: {
+          email: mockEmail,
+        },
+      }
+      MockRepository.findOne.mockResolvedValueOnce(mockSite)
+      MockReviewRequestService.getLatestMergedReviewRequest.mockResolvedValueOnce(
+        mockReviewRequest
+      )
+
+      // Act
+      const actual = await SitesService.getMergeAuthorEmail(
+        commit,
+        mockUserWithSiteSessionData
+      )
+
+      // Assert
+      expect(actual).toEqual(expected)
+      expect(
+        MockReviewRequestService.getLatestMergedReviewRequest
+      ).toHaveBeenCalledWith(mockSite)
+      expect(SpySitesService.extractAuthorEmail).not.toHaveBeenCalled()
+    })
+
+    it("should return the email of the merge commit author if the requestor for the latest merged review request does not have an email", async () => {
+      // Arrange
+      const expected = MOCK_GITHUB_EMAIL_ADDRESS_ONE
+      const commit: GitHubCommitData = {
+        author: {
+          name: MOCK_COMMON_ACCESS_TOKEN_GITHUB_NAME,
+          email: MOCK_GITHUB_EMAIL_ADDRESS_ONE,
+          date: MOCK_GITHUB_DATE_ONE,
+        },
+        message: MOCK_COMMIT_MESSAGE_ONE,
+      }
+      const mockReviewRequest = {
+        requestor: {
+          email: null,
+        },
+      }
+      MockRepository.findOne.mockResolvedValueOnce(mockSite)
+      MockReviewRequestService.getLatestMergedReviewRequest.mockResolvedValueOnce(
+        mockReviewRequest
+      )
+
+      // Act
+      const actual = await SitesService.getMergeAuthorEmail(
+        commit,
+        mockUserWithSiteSessionData
+      )
+
+      // Assert
+      expect(actual).toEqual(expected)
+      expect(
+        MockReviewRequestService.getLatestMergedReviewRequest
+      ).toHaveBeenCalledWith(mockSite)
+      expect(SpySitesService.extractAuthorEmail).toHaveBeenCalled()
+    })
+  })
+
+  describe("getUrlsOfSite", () => {
+    const deployment: Partial<Deployment> = {
+      stagingUrl: MOCK_STAGING_URL_DB,
+      productionUrl: MOCK_PRODUCTION_URL_DB,
+    }
+    const emptyDeployment: Partial<Deployment> = {
+      stagingUrl: "",
+      productionUrl: "",
+    }
+    const configYmlData: Partial<ConfigYmlData> = {
+      staging: MOCK_STAGING_URL_CONFIGYML,
+      prod: MOCK_PRODUCTION_URL_CONFIGYML,
+    }
+    const emptyConfigYmlData: Partial<ConfigYmlData> = {
+      staging: "",
+      prod: "",
+    }
+    const gitHubUrls = {
+      staging: MOCK_STAGING_URL_GITHUB,
+      prod: MOCK_PRODUCTION_URL_GITHUB,
+    }
+    const repoInfo: { description: string } = {
+      description: `Staging: ${gitHubUrls.staging} | Production: ${gitHubUrls.prod}`,
+    }
+
+    it("should return the urls of the site from the deployments table", async () => {
+      // Arrange
+      const expected = {
+        staging: deployment.stagingUrl,
+        prod: deployment.productionUrl,
+      }
+      const mockSiteWithDeployment = {
+        ...mockSite,
+        deployment,
+      }
+
+      MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment)
+
+      // Act
+      const actual = await SitesService.getUrlsOfSite(
+        mockSessionDataEmailUserWithSite
+      )
+
+      // Assert
+      expect(actual).toEqual(expected)
+      expect(MockRepository.findOne).toHaveBeenCalled()
+      expect(MockConfigYmlService.read).not.toHaveBeenCalled()
+      expect(MockGithubService.getRepoInfo).not.toHaveBeenCalled()
+    })
+
+    it("should return the urls of the site from the _config.yml file", async () => {
+      // Arrange
+      const expected = {
+        staging: configYmlData.staging,
+        prod: configYmlData.prod,
+      }
+      const mockSiteWithNullDeployment = {
+        ...mockSite,
+        deployment: {
+          ...emptyDeployment,
+        },
+      }
+
+      MockRepository.findOne.mockResolvedValueOnce(mockSiteWithNullDeployment)
+      MockConfigYmlService.read.mockResolvedValueOnce({
+        content: configYmlData,
+      })
+
+      // Act
+      const actual = await
SitesService.getUrlsOfSite( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockConfigYmlService.read).toHaveBeenCalled() + expect(MockGithubService.getRepoInfo).not.toHaveBeenCalled() + }) + + it("should return the urls of the site from the GitHub repo description", async () => { + // Arrange + const expected = { + staging: gitHubUrls.staging, + prod: gitHubUrls.prod, + } + const mockSiteWithNullDeployment = { + ...mockSite, + deployment: { + ...emptyDeployment, + }, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithNullDeployment) + MockConfigYmlService.read.mockResolvedValueOnce({ + content: { + ...emptyConfigYmlData, + }, + }) + MockGithubService.getRepoInfo.mockResolvedValueOnce(repoInfo) + + // Act + const actual = await SitesService.getUrlsOfSite( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockConfigYmlService.read).toHaveBeenCalled() + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + + it("should return a NotFoundError if all fails", async () => { + // Arrange + const mockSiteWithNullDeployment = { + ...mockSite, + deployment: { + ...emptyDeployment, + }, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithNullDeployment) + MockConfigYmlService.read.mockResolvedValueOnce({ + content: { + ...emptyConfigYmlData, + }, + }) + MockGithubService.getRepoInfo.mockResolvedValueOnce({ + description: "", + }) + + // Act + const actual = await SitesService.getUrlsOfSite( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toBeInstanceOf(NotFoundError) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockConfigYmlService.read).toHaveBeenCalled() + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + }) + + describe("getSites", () => { + it("Filters accessible sites for github user correctly", async () => { + // Store the API key and set it later so that other tests are not affected + const currRepoCount = config.get("sites.pageCount") + config.set("sites.pageCount", 3) + + const expectedResp = [ + { + lastUpdated: repoInfo.pushed_at, + permissions: repoInfo.permissions, + repoName: repoInfo.name, + isPrivate: repoInfo.private, + }, + { + lastUpdated: repoInfo2.pushed_at, + permissions: repoInfo2.permissions, + repoName: repoInfo2.name, + isPrivate: repoInfo2.private, + }, + ] + MockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) + mockAxios.get.mockResolvedValueOnce({ + data: [repoInfo, repoInfo2, adminRepo, noAccessRepo], + }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + + await expect( + SitesService.getSites(mockUserWithSiteSessionData) + ).resolves.toMatchObject(expectedResp) + + expect(mockAxios.get).toHaveBeenCalledTimes(3) + config.set("sites.pageCount", currRepoCount) + expect(config.get("sites.pageCount")).toBe(currRepoCount) + }) + + it("Filters accessible sites for email user correctly", async () => { + // Store the API key and set it later so that other tests are not affected + const currRepoCount = config.get("sites.pageCount") + config.set("sites.pageCount", 3) + + const expectedResp: RepositoryData[] = [ + { + lastUpdated: repoInfo.pushed_at, + permissions: repoInfo.permissions, + repoName: repoInfo.name, + isPrivate: repoInfo.private, + }, + ] + MockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) 
+ MockUsersService.findSitesByUserId.mockImplementationOnce(() => ({ + site_members: [{ repo: { name: repoInfo.name } }], + })) + mockAxios.get.mockResolvedValueOnce({ + data: [repoInfo, repoInfo2, adminRepo, noAccessRepo], + }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + + await expect( + SitesService.getSites(mockSessionDataEmailUser) + ).resolves.toMatchObject(expectedResp) + + expect(MockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + expect(mockAxios.get).toHaveBeenCalledTimes(3) + config.set("sites.pageCount", currRepoCount) + expect(config.get("sites.pageCount")).toBe(currRepoCount) + }) + + it("Filters accessible sites for email user with no sites correctly", async () => { + // Store the API key and set it later so that other tests are not affected + const currRepoCount = config.get("sites.pageCount") + config.set("sites.pageCount", 3) + + const expectedResp: RepositoryData[] = [] + MockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) + MockUsersService.findSitesByUserId.mockImplementationOnce(() => null) + mockAxios.get.mockResolvedValueOnce({ + data: [repoInfo, repoInfo2, adminRepo, noAccessRepo], + }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + + await expect( + SitesService.getSites(mockSessionDataEmailUser) + ).resolves.toMatchObject(expectedResp) + + expect(MockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + expect(MockUsersService.findSitesByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + expect(mockAxios.get).toHaveBeenCalledTimes(3) + config.set("sites.pageCount", currRepoCount) + expect(config.get("sites.pageCount")).toBe(currRepoCount) + }) + + it("Returns all accessible sites for admin user correctly", async () => { + // Store the API key and set it later so that other tests are not affected + const currRepoCount = config.get("sites.pageCount") + config.set("sites.pageCount", 3) + + const expectedResp = [ + { + lastUpdated: repoInfo.pushed_at, + permissions: repoInfo.permissions, + repoName: repoInfo.name, + isPrivate: repoInfo.private, + }, + { + lastUpdated: repoInfo2.pushed_at, + permissions: repoInfo2.permissions, + repoName: repoInfo2.name, + isPrivate: repoInfo2.private, + }, + ] + MockIsomerAdminsService.getByUserId.mockImplementationOnce(() => "user") + MockUsersService.findSitesByUserId.mockImplementationOnce(() => [ + repoInfo.name, + repoInfo2.name, + ]) + mockAxios.get.mockResolvedValueOnce({ + data: [repoInfo, repoInfo2, adminRepo, noAccessRepo], + }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + mockAxios.get.mockResolvedValueOnce({ data: [] }) + + await expect( + SitesService.getSites(mockUserWithSiteSessionData) + ).resolves.toMatchObject(expectedResp) + + expect(MockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + expect(mockAxios.get).toHaveBeenCalledTimes(3) + config.set("sites.pageCount", currRepoCount) + expect(config.get("sites.pageCount")).toBe(currRepoCount) + }) + }) + + describe("checkHasAccessForGitHubUser", () => { + it("Checks if a user has access to a site", async () => { + await expect( + SitesService.checkHasAccessForGitHubUser(mockUserWithSiteSessionData) + ).resolves.not.toThrow() + + expect(MockGithubService.checkHasAccess).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) }) }) - it("should call the underlying getToken method of the token store when the site exists", async () => { 
- // Arrange - const expected = mockSiteToken - const getSpy = jest.spyOn(SitesService, "getBySiteName") - getSpy.mockResolvedValueOnce(mockSite) - MockTokenStore.getToken.mockResolvedValue(mockSiteToken) + describe("getLastUpdated", () => { + it("Checks when site was last updated", async () => { + MockGithubService.getRepoInfo.mockResolvedValueOnce(repoInfo) + + await expect( + SitesService.getLastUpdated(mockUserWithSiteSessionData) + ).resolves.toEqual(repoInfo.pushed_at) + + expect(MockGithubService.getRepoInfo).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + }) + }) + + describe("getStagingUrl", () => { + it("should return the staging URL if it is available", async () => { + // Arrange + const mockSiteWithDeployment = { + ...mockSite, + deployment: { + stagingUrl: MOCK_STAGING_URL_DB, + productionUrl: MOCK_PRODUCTION_URL_DB, + }, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) - // Act - const actual = await SitesService.getSiteAccessToken(mockSiteName) + // Act + const actual = await SitesService.getStagingUrl( + mockSessionDataEmailUserWithSite + ) - // Assert - expect(actual).toBe(expected) - expect(getSpy).toBeCalledWith(mockSiteName) - expect(MockTokenStore.getToken).toBeCalledWith(mockSite.apiTokenName) + // Assert + expect(actual).toEqual(MOCK_STAGING_URL_DB) + expect(MockRepository.findOne).toHaveBeenCalled() + }) + + it("should return an error when the staging url for a repo is not found", async () => { + // Arrange + MockRepository.findOne.mockResolvedValueOnce(null) + MockConfigYmlService.read.mockResolvedValueOnce({ + content: {}, + }) + MockGithubService.getRepoInfo.mockResolvedValueOnce({ + description: "", + }) + + // Act + await expect( + SitesService.getStagingUrl(mockUserWithSiteSessionData) + ).resolves.toBeInstanceOf(NotFoundError) + + // Assert + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockConfigYmlService.read).toHaveBeenCalled() + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) }) - it("should return null when there is no site with siteName", async () => { - // Arrange - const getSpy = jest.spyOn(SitesService, "getBySiteName") - getSpy.mockResolvedValueOnce(null) + describe("getSiteUrl", () => { + it("should return the site URL if it is available", async () => { + // Arrange + const mockSiteWithDeployment = { + ...mockSite, + deployment: { + stagingUrl: MOCK_STAGING_URL_DB, + productionUrl: MOCK_PRODUCTION_URL_DB, + }, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) + + // Act + const actual = await SitesService.getSiteUrl( + mockSessionDataEmailUserWithSite + ) - // Act - const actual = await SitesService.getSiteAccessToken(mockSiteName) + // Assert + expect(actual).toEqual(MOCK_PRODUCTION_URL_DB) + expect(MockRepository.findOne).toHaveBeenCalled() + }) + + it("should return an error when the site url for a repo is not found", async () => { + // Arrange + MockRepository.findOne.mockResolvedValueOnce(null) + MockConfigYmlService.read.mockResolvedValueOnce({ + content: {}, + }) + MockGithubService.getRepoInfo.mockResolvedValueOnce({ + description: "", + }) + + // Act + await expect( + SitesService.getSiteUrl(mockUserWithSiteSessionData) + ).resolves.toBeInstanceOf(NotFoundError) + + // Assert + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockConfigYmlService.read).toHaveBeenCalled() + expect(MockGithubService.getRepoInfo).toHaveBeenCalled() + }) + }) - // Assert - expect(actual).toBeNull() - expect(MockTokenStore.getToken).not.toBeCalled() + 
describe("getSiteInfo", () => { + const mockSiteWithDeployment = { + ...mockSite, + deployment: { + stagingUrl: MOCK_STAGING_URL_DB, + productionUrl: MOCK_PRODUCTION_URL_DB, + }, + } + + it("should return the site info if authors are email login users", async () => { + // Arrange + const mockStagingCommit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: JSON.stringify(MOCK_COMMIT_MESSAGE_OBJECT_ONE), + } + const mockStagingCommitAuthor: Partial = { + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + } + const mockProductionCommit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_TWO, + email: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + date: MOCK_GITHUB_DATE_TWO, + }, + message: JSON.stringify(MOCK_COMMIT_MESSAGE_OBJECT_TWO), + } + const mockProductionCommitAuthor: Partial = { + email: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + } + const expected: SiteInfo = { + savedAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + savedBy: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + publishedAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + publishedBy: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + stagingUrl: MOCK_STAGING_URL_DB, + siteUrl: MOCK_PRODUCTION_URL_DB, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockStagingCommit + ) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockProductionCommit + ) + MockUsersService.findById.mockResolvedValueOnce(mockStagingCommitAuthor) + MockUsersService.findById.mockResolvedValueOnce( + mockProductionCommitAuthor + ) + + // Act + const actual = await SitesService.getSiteInfo( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockGithubService.getLatestCommitOfBranch).toHaveBeenCalledTimes(2) + expect(MockUsersService.findById).toHaveBeenCalled() + }) + + it("should return the site info if authors are GitHub login users", async () => { + // Arrange + const mockStagingCommit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_ONE, + email: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + date: MOCK_GITHUB_DATE_ONE, + }, + message: MOCK_COMMIT_MESSAGE_ONE, + } + const mockProductionCommit: GitHubCommitData = { + author: { + name: MOCK_GITHUB_NAME_TWO, + email: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + date: MOCK_GITHUB_DATE_TWO, + }, + message: MOCK_COMMIT_MESSAGE_TWO, + } + const expected: SiteInfo = { + savedAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + savedBy: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + publishedAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + publishedBy: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + stagingUrl: MOCK_STAGING_URL_DB, + siteUrl: MOCK_PRODUCTION_URL_DB, + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockStagingCommit + ) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockProductionCommit + ) + + // Act + const actual = await SitesService.getSiteInfo( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockGithubService.getLatestCommitOfBranch).toHaveBeenCalledTimes(2) + expect(MockUsersService.findById).not.toHaveBeenCalled() + }) + + it("should return UnprocessableError when the site is not found", async () => { + // Arrange + MockRepository.findOne.mockResolvedValueOnce(null) + 
MockConfigYmlService.read.mockResolvedValueOnce({ + content: {}, + }) + MockGithubService.getRepoInfo.mockResolvedValueOnce({ + description: "", + }) + + // Act + await expect( + SitesService.getSiteInfo(mockSessionDataEmailUserWithSite) + ).resolves.toBeInstanceOf(UnprocessableError) + + // Assert + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockUsersService.findById).not.toHaveBeenCalled() + }) + + it("should return UnprocessableError when the GitHub commit is not found", async () => { + // Arrange + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce(null) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce(null) + + // Act + await expect( + SitesService.getSiteInfo(mockSessionDataEmailUserWithSite) + ).resolves.toBeInstanceOf(UnprocessableError) + + // Assert + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockGithubService.getLatestCommitOfBranch).toHaveBeenCalledTimes(2) + expect(MockUsersService.findById).not.toHaveBeenCalled() + }) + + it("should return with unknown author when the GitHub commit is empty", async () => { + // Arrange + const expected: SiteInfo = { + savedAt: 0, + savedBy: "Unknown Author", + publishedAt: 0, + publishedBy: "Unknown Author", + stagingUrl: MOCK_STAGING_URL_DB, + siteUrl: MOCK_PRODUCTION_URL_DB, + } + + const mockEmptyCommit: GitHubCommitData = { + author: { + name: "", + email: "", + date: "", + }, + message: "", + } + + MockRepository.findOne.mockResolvedValueOnce(mockSiteWithDeployment) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockEmptyCommit + ) + MockGithubService.getLatestCommitOfBranch.mockResolvedValueOnce( + mockEmptyCommit + ) + + // Act + const actual = await SitesService.getSiteInfo( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockRepository.findOne).toHaveBeenCalled() + expect(MockGithubService.getLatestCommitOfBranch).toHaveBeenCalledTimes(2) + expect(MockUsersService.findById).not.toHaveBeenCalled() + }) }) }) diff --git a/src/services/utilServices/__tests__/SmsClient.spec.ts b/src/services/identity/__tests__/SmsClient.spec.ts similarity index 65% rename from src/services/utilServices/__tests__/SmsClient.spec.ts rename to src/services/identity/__tests__/SmsClient.spec.ts index 560655ff3..cec3eb837 100644 --- a/src/services/utilServices/__tests__/SmsClient.spec.ts +++ b/src/services/identity/__tests__/SmsClient.spec.ts @@ -1,5 +1,7 @@ import mockAxios from "jest-mock-axios" +import { config } from "@config/config" + import { mockBody, mockRecipient } from "@fixtures/identity" import _SmsClient from "@services/identity/SmsClient" @@ -7,7 +9,7 @@ const mockEndpoint = "/transactional/sms/send" const SmsClient = new _SmsClient() -const { POSTMAN_SMS_CRED_NAME } = process.env +const POSTMAN_SMS_CRED_NAME = config.get("postman.smsCredName") const generateSms = (recipient: string, body: string) => ({ recipient, @@ -29,22 +31,6 @@ describe("Sms Client", () => { expect(mockAxios.post).toHaveBeenCalledWith(mockEndpoint, generatedSms) }) - it("should throw an error on initialization when there is no api key", () => { - // Arrange - // Store the API key and set it later so that other tests are not affected - const curApiKey = process.env.POSTMAN_API_KEY - process.env.POSTMAN_API_KEY = "" - - // Act - // NOTE: We require a new instance because the old one would already have the API key bound - const actual = () => new _SmsClient() - - // Assert - 
expect(actual).toThrowError("Postman.gov.sg API key cannot be empty") - process.env.POSTMAN_API_KEY = curApiKey - expect(process.env.POSTMAN_API_KEY).toBe(curApiKey) - }) - it("should return an error when a network error occurs", async () => { // Arrange const generatedSms = generateSms(mockRecipient, mockBody) diff --git a/src/services/identity/__tests__/TokenStore.spec.ts b/src/services/identity/__tests__/TokenStore.spec.ts deleted file mode 100644 index c522ae542..000000000 --- a/src/services/identity/__tests__/TokenStore.spec.ts +++ /dev/null @@ -1,48 +0,0 @@ -import { - secretsManagerClient as mockSecretsManager, - GetSecretValueCommand, -} from "@mocks/@aws-sdk/client-secrets-manager" - -import _TokenStore from "../TokenStore" - -const apiTokenName = "some token" -const TokenStore = new _TokenStore() - -describe("Token Store", () => { - beforeEach(() => { - // Clear all instances and calls to constructor and all methods: - mockSecretsManager.send.mockClear() - }) - - it("should return the api token when the parameters are valid", async () => { - // Arrange - const expected = "some api" - const awsCommand = new GetSecretValueCommand({ - SecretId: apiTokenName, - }) - mockSecretsManager.send.mockResolvedValueOnce({ SecretString: expected }) - - // Act - const actual = await TokenStore.getToken(apiTokenName) - - // Assert - expect(actual).toBe(expected) - expect(mockSecretsManager.send).toHaveBeenCalledWith(awsCommand) - }) - - it("should return the error when the secrets client fails to retrieve credentials", () => { - // Arrange - const expected = Error("oh noes") - const awsCommand = new GetSecretValueCommand({ - SecretId: apiTokenName, - }) - mockSecretsManager.send.mockRejectedValueOnce(expected) - - // Act - const actual = TokenStore.getToken(apiTokenName) - - // Assert - expect(actual).rejects.toBe(expected) - expect(mockSecretsManager.send).toHaveBeenCalledWith(awsCommand) - }) -}) diff --git a/src/services/utilServices/__tests__/TotpGenerator.spec.ts b/src/services/identity/__tests__/TotpGenerator.spec.ts similarity index 100% rename from src/services/utilServices/__tests__/TotpGenerator.spec.ts rename to src/services/identity/__tests__/TotpGenerator.spec.ts diff --git a/src/services/identity/__tests__/UsersService.spec.ts b/src/services/identity/__tests__/UsersService.spec.ts index 14da61406..c0e7b0286 100644 --- a/src/services/identity/__tests__/UsersService.spec.ts +++ b/src/services/identity/__tests__/UsersService.spec.ts @@ -1,43 +1,53 @@ import { Sequelize } from "sequelize-typescript" import { ModelStatic } from "sequelize/types" -import { User, Whitelist } from "@root/database/models" +import { Otp, User, Whitelist } from "@root/database/models" import SmsClient from "@services/identity/SmsClient" import TotpGenerator from "@services/identity/TotpGenerator" import MailClient from "@services/utilServices/MailClient" +import OtpService from "../OtpService" import _UsersService from "../UsersService" -const MockOtp = { - generate: jest.fn(), - getExpiryMinutes: jest.fn(), - verify: jest.fn(), +const MockOtpService = { + generateLoginOtpWithHash: jest.fn(), + verifyOtp: jest.fn(), } + const MockMailer = ({ sendMail: jest.fn(), } as unknown) as MailClient + const MockSmsClient = { sendSms: jest.fn(), } + const MockRepository = { findOne: jest.fn(), update: jest.fn(), create: jest.fn(), } + const MockSequelize = { transaction: jest.fn((closure) => closure("transaction")), } + const MockWhitelist = { findAll: jest.fn(), } +const MockOtp = { + findOne: jest.fn(), +} + const 
UsersService = new _UsersService({ - otp: (MockOtp as unknown) as TotpGenerator, mailer: (MockMailer as unknown) as MailClient, smsClient: (MockSmsClient as unknown) as SmsClient, repository: (MockRepository as unknown) as ModelStatic, sequelize: (MockSequelize as unknown) as Sequelize, whitelist: (MockWhitelist as unknown) as ModelStatic, + otpService: (MockOtpService as unknown) as OtpService, + otpRepository: (MockOtp as unknown) as ModelStatic, }) const mockEmail = "someone@tech.gov.sg" diff --git a/src/services/identity/index.ts b/src/services/identity/index.ts index 3d08d6381..a86b5226c 100644 --- a/src/services/identity/index.ts +++ b/src/services/identity/index.ts @@ -1,62 +1,59 @@ import { Sequelize } from "sequelize-typescript" +import { config } from "@config/config" + import logger from "@logger/logger" -import { User, Site, Whitelist } from "@database/models" +import { + User, + Whitelist, + IsomerAdmin, + Notification, + SiteMember, + Otp, +} from "@database/models" import { GitHubService } from "@services/db/GitHubService" import SmsClient from "@services/identity/SmsClient" import TotpGenerator from "@services/identity/TotpGenerator" import { mailer } from "@services/utilServices/MailClient" import AuthService from "./AuthService" -import SitesService from "./SitesService" -import TokenStore from "./TokenStore" +import IsomerAdminsService from "./IsomerAdminsService" +import NotificationsService from "./NotificationsService" +import OtpService from "./OtpService" import UsersService from "./UsersService" -const { - OTP_EXPIRY, - OTP_SECRET, - NODE_ENV, - LOCAL_SITE_ACCESS_TOKEN, -} = process.env - -const IS_LOCAL_DEV = NODE_ENV === "LOCAL_DEV" +const NODE_ENV = config.get("env") +const OTP_SECRET = config.get("auth.otpSecret") +const OTP_EXPIRY = config.get("auth.otpExpiry") -const tokenStore = IS_LOCAL_DEV - ? (({ - getToken: (_apiTokenName: string) => LOCAL_SITE_ACCESS_TOKEN, - } as unknown) as TokenStore) - : new TokenStore() - -if (!OTP_SECRET) { - throw new Error( - "Please ensure that you have set OTP_SECRET in your env vars and that you have sourced them!" - ) -} +const IS_DEV = NODE_ENV === "dev" +// TODO: To remove TOTP const totpGenerator = new TotpGenerator({ - secret: OTP_SECRET!, - expiry: parseInt(OTP_EXPIRY!, 10) ?? undefined, + secret: OTP_SECRET, + expiry: OTP_EXPIRY, }) -const smsClient = IS_LOCAL_DEV +const smsClient = IS_DEV ? ({ sendSms: (_mobileNumber: string, message: string) => logger.info(message), } as SmsClient) : new SmsClient() -export const sitesService = new SitesService({ repository: Site, tokenStore }) +export const otpService = new OtpService() // NOTE: This is because the usersService requires an instance of sequelize // as it requires a transaction for certain methods export const getUsersService = (sequelize: Sequelize) => new UsersService({ repository: User, - otp: totpGenerator, mailer, smsClient, sequelize, whitelist: Whitelist, + otpService, + otpRepository: Otp, }) // NOTE: This is because the identity auth service has an @@ -64,3 +61,12 @@ export const getUsersService = (sequelize: Sequelize) => // the GithubService instance in... 
export const getIdentityAuthService = (gitHubService: GitHubService) => new AuthService({ gitHubService }) + +export const isomerAdminsService = new IsomerAdminsService({ + repository: IsomerAdmin, +}) + +export const notificationsService = new NotificationsService({ + repository: Notification, + siteMember: SiteMember, +}) diff --git a/src/services/infra/InfraService.ts b/src/services/infra/InfraService.ts index d2a99b063..63b5f5162 100644 --- a/src/services/infra/InfraService.ts +++ b/src/services/infra/InfraService.ts @@ -63,12 +63,7 @@ export default class InfraService { this.queueService = queueService } - createSite = async ( - submissionId: string, - creator: User, - siteName: string, - repoName: string - ) => { + createSite = async (creator: User, siteName: string, repoName: string) => { let site: Site | undefined // For error handling try { // 1. Create a new site record in the Sites table diff --git a/src/services/middlewareServices/AuthMiddlewareService.js b/src/services/middlewareServices/AuthMiddlewareService.js deleted file mode 100644 index f2760c824..000000000 --- a/src/services/middlewareServices/AuthMiddlewareService.js +++ /dev/null @@ -1,144 +0,0 @@ -// Import logger -const logger = require("@logger/logger") - -// Import errors -const { AuthError } = require("@errors/AuthError") -const { NotFoundError } = require("@errors/NotFoundError") - -const jwtUtils = require("@utils/jwt-utils") - -const { BadRequestError } = require("@root/errors/BadRequestError") -const { sitesService } = require("@services/identity") - -const { E2E_TEST_REPO, E2E_TEST_SECRET, E2E_TEST_GH_TOKEN } = process.env -const E2E_TEST_USER = "e2e-test" -const GENERAL_ACCESS_PATHS = ["/v1/sites", "/v1/auth/whoami"] - -class AuthMiddlewareService { - constructor({ identityAuthService }) { - this.identityAuthService = identityAuthService - } - - verifyE2E({ cookies, url }) { - const { isomercmsE2E } = cookies - const urlTokens = url.split("/") // urls take the form "/v1/sites//"" - - if (!isomercmsE2E) return false - - if (isomercmsE2E !== E2E_TEST_SECRET) throw new AuthError("Bad credentials") - - if (urlTokens.length < 3) throw new BadRequestError("Invalid path") - - // General access paths are allowed - if (GENERAL_ACCESS_PATHS.includes(url)) return true - - // Throw an error if accessing a repo other than e2e-test-repo - const repo = urlTokens[3] - if (repo !== E2E_TEST_REPO) - throw new AuthError(`E2E tests can only access the ${E2E_TEST_REPO} repo`) - - return true - } - - verifyJwt({ cookies, url }) { - const { isomercms } = cookies - const isValidE2E = this.verifyE2E({ cookies, url }) - - if (isValidE2E) { - const accessToken = E2E_TEST_GH_TOKEN - const userId = E2E_TEST_USER - return { accessToken, userId } - } - if (!isomercms) { - logger.error(`Authentication error: JWT token expired. 
Url: ${url}`) - throw new AuthError(`JWT token has expired`) - } - try { - const { - access_token: retrievedToken, - user_id: retrievedId, - isomer_user_id: isomerUserId, - } = jwtUtils.verifyToken(isomercms) - if (!isomerUserId) { - const notLoggedInError = new Error("User not logged in with email") - notLoggedInError.name = "NotLoggedInError" - throw notLoggedInError - } - const accessToken = jwtUtils.decryptToken(retrievedToken) - const userId = retrievedId - return { accessToken, userId } - } catch (err) { - // NOTE: Cookies aren't being logged here because they get caught as "Object object", which is not useful - // The cookies should be converted to a JSON struct before logging - if (err.name === "NotLoggedInError") { - logger.error( - `Authentication error: user not logged in with email. Url: ${url}` - ) - throw new AuthError( - `Authentication error: user not logged in with email` - ) - } else if (err.name === "TokenExpiredError") { - logger.error(`Authentication error: JWT token expired. Url: ${url}`) - throw new AuthError(`JWT token has expired`) - } else { - logger.error( - `Authentication error. Message: ${err.message} Url: ${url}` - ) - } - throw err - } - } - - whoamiAuth({ cookies, url }) { - const isValidE2E = this.verifyE2E({ cookies, url }) - - if (isValidE2E) { - const accessToken = E2E_TEST_GH_TOKEN - const userId = E2E_TEST_USER - return { accessToken, userId } - } - try { - const { isomercms } = cookies - const { access_token: verifiedToken } = jwtUtils.verifyToken(isomercms) - const accessToken = jwtUtils.decryptToken(verifiedToken) - return { accessToken, userId: undefined } - } catch (err) { - return { accessToken: undefined, userId: undefined } - } - } - - async retrieveSiteAccessTokenIfAvailable({ - siteName, - userAccessToken, - userId, - }) { - // Check if site is onboarded to Isomer identity, otherwise continue using user access token - const site = await sitesService.getBySiteName(siteName) - if (!site) { - logger.info( - `Site ${siteName} does not exist in site table. Default to using user access token.` - ) - return undefined - } - - logger.info(`Verifying user's access to ${siteName}`) - - const hasAccessToSite = await this.identityAuthService.hasAccessToSite( - { accessToken: userAccessToken, siteName }, - { userId } - ) - if (!hasAccessToSite) { - throw new NotFoundError("Site does not exist") - } - - const siteAccessToken = await sitesService.getSiteAccessToken(siteName) - logger.info( - `User ${userId} has access to ${siteName}. 
Using site access token ${site.apiTokenName}.` - ) - return siteAccessToken - } -} - -module.exports = { - AuthMiddlewareService, -} diff --git a/src/services/middlewareServices/AuthenticationMiddlewareService.ts b/src/services/middlewareServices/AuthenticationMiddlewareService.ts new file mode 100644 index 000000000..32c253435 --- /dev/null +++ b/src/services/middlewareServices/AuthenticationMiddlewareService.ts @@ -0,0 +1,109 @@ +// Import logger +import _ from "lodash" + +import { config } from "@config/config" + +import logger from "@logger/logger" + +// Import errors +import { AuthError } from "@errors/AuthError" + +import jwtUtils from "@utils/jwt-utils" + +import { E2E_TEST_EMAIL, E2E_ISOMER_ID } from "@root/constants" +import { BadRequestError } from "@root/errors/BadRequestError" +import { SessionData } from "@root/types/express/session" + +const E2E_TEST_REPO = config.get("cypress.e2eTestRepo") +const E2E_TEST_SECRET = config.get("cypress.e2eTestSecret") +const E2E_TEST_GH_TOKEN = config.get("cypress.e2eTestGithubToken") +const E2E_TEST_USER = "e2e-test" +const GENERAL_ACCESS_PATHS = [ + "/v1/sites", + "/v1/auth/whoami", + "/v2/sites", + "/v2/auth/whoami", +] + +type VerifyAccessProps = SessionData & { + cookies: { + isomercms: string + isomercmsE2E?: string + } + url: string +} + +export default class AuthenticationMiddlewareService { + verifyE2E({ cookies, url }: Omit) { + const { isomercmsE2E } = cookies + const urlTokens = url.split("/") // urls take the form "/v1/sites//"" + + if (!isomercmsE2E) return false + + if (isomercmsE2E !== E2E_TEST_SECRET) throw new AuthError("Bad credentials") + + if (urlTokens.length < 3) throw new BadRequestError("Invalid path") + + // General access paths are allowed + if (GENERAL_ACCESS_PATHS.includes(url)) return true + + // Throw an error if accessing a repo other than e2e-test-repo + const repo = urlTokens[3] + if (repo !== E2E_TEST_REPO) + throw new AuthError(`E2E tests can only access the ${E2E_TEST_REPO} repo`) + + return true + } + + verifyAccess({ cookies, url, userInfo }: VerifyAccessProps) { + const isValidE2E = this.verifyE2E({ cookies, url }) + + if (isValidE2E) { + const accessToken = E2E_TEST_GH_TOKEN + const githubId = E2E_TEST_USER + const isomerUserId = E2E_ISOMER_ID + const email = E2E_TEST_EMAIL + return { accessToken, githubId, isomerUserId, email } + } + try { + if (_.isEmpty(userInfo)) { + const notLoggedInError = new Error("User not logged in with email") + notLoggedInError.name = "NotLoggedInError" + throw notLoggedInError + } + const { + accessToken: retrievedToken, + githubId, + isomerUserId, + email, + } = userInfo + const accessToken = retrievedToken + ? jwtUtils.decryptToken(retrievedToken) + : "" + return { accessToken, githubId, isomerUserId, email } + } catch (err) { + if (!(err instanceof Error)) { + // NOTE: If the error is of an unknown kind, we bubble it up the stack and block access. + throw err + } + // NOTE: Cookies aren't being logged here because they get caught as "Object object", which is not useful + // The cookies should be converted to a JSON struct before logging + if (err.name === "NotLoggedInError") { + logger.error( + `Authentication error: user not logged in with email. Url: ${url}` + ) + throw new AuthError( + `Authentication error: user not logged in with email` + ) + } else if (err.name === "TokenExpiredError") { + logger.error(`Authentication error: JWT token expired. Url: ${url}`) + throw new AuthError(`JWT token has expired`) + } else { + logger.error( + `Authentication error. 
Message: ${err.message} Url: ${url}` + ) + } + throw err + } + } +} diff --git a/src/services/middlewareServices/AuthorizationMiddlewareService.ts b/src/services/middlewareServices/AuthorizationMiddlewareService.ts new file mode 100644 index 000000000..3715324bb --- /dev/null +++ b/src/services/middlewareServices/AuthorizationMiddlewareService.ts @@ -0,0 +1,99 @@ +import logger from "@logger/logger" + +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { CollaboratorRoles, E2E_ISOMER_ID } from "@root/constants" +import { ForbiddenError } from "@root/errors/ForbiddenError" +import AuthService from "@services/identity/AuthService" +import CollaboratorsService from "@services/identity/CollaboratorsService" +import IsomerAdminsService from "@services/identity/IsomerAdminsService" +import UsersService from "@services/identity/UsersService" + +interface AuthorizationMiddlewareServiceProps { + identityAuthService: AuthService + usersService: UsersService + isomerAdminsService: IsomerAdminsService + collaboratorsService: CollaboratorsService +} + +export default class AuthorizationMiddlewareService { + readonly identityAuthService: AuthorizationMiddlewareServiceProps["identityAuthService"] + + readonly usersService: AuthorizationMiddlewareServiceProps["usersService"] + + readonly isomerAdminsService: AuthorizationMiddlewareServiceProps["isomerAdminsService"] + + readonly collaboratorsService: AuthorizationMiddlewareServiceProps["collaboratorsService"] + + constructor({ + identityAuthService, + usersService, + isomerAdminsService, + collaboratorsService, + }: AuthorizationMiddlewareServiceProps) { + this.identityAuthService = identityAuthService + this.usersService = usersService + this.isomerAdminsService = isomerAdminsService + this.collaboratorsService = collaboratorsService + } + + async doesUserHaveCollaboratorLevelAccess( + siteName: string, + userId: string, + collaboratorType: CollaboratorRoles + ) { + const collaboratorRole = await this.collaboratorsService.getRole( + siteName, + userId + ) + return collaboratorType === CollaboratorRoles.Admin + ? collaboratorRole === CollaboratorRoles.Admin + : collaboratorRole === CollaboratorRoles.Admin || + collaboratorRole === CollaboratorRoles.Contributor + } + + async checkIsSiteCollaborator( + sessionData: UserWithSiteSessionData, + collaboratorType: CollaboratorRoles + ) { + // Check if user has access to site + const { siteName, isomerUserId: userId } = sessionData + + // Should always be defined - authorization middleware only exists if siteName is defined + if (!siteName) { + logger.error("No site name in authorization middleware") + return new ForbiddenError() + } + + logger.info(`Verifying user's access to ${siteName}`) + const isSiteCollaboratorOfType = sessionData.isEmailUser() + ? 
await this.doesUserHaveCollaboratorLevelAccess( + siteName, + userId, + collaboratorType + ) + : await this.identityAuthService.hasAccessToSite(sessionData) + const isIsomerCoreAdmin = await this.isomerAdminsService.getByUserId(userId) + + const isE2EUser = userId === E2E_ISOMER_ID + if (!isSiteCollaboratorOfType && !isIsomerCoreAdmin && !isE2EUser) { + logger.error("Site does not exist") + return new ForbiddenError() + } + + logger.info( + `User ${sessionData.isomerUserId} has ${collaboratorType} access to ${sessionData.siteName}` + ) + } + + async checkIsSiteMember(sessionData: UserWithSiteSessionData) { + return this.checkIsSiteCollaborator( + sessionData, + CollaboratorRoles.Contributor + ) + } + + async checkIsSiteAdmin(sessionData: UserWithSiteSessionData) { + return this.checkIsSiteCollaborator(sessionData, CollaboratorRoles.Admin) + } +} diff --git a/src/services/middlewareServices/__tests__/AuthorizationMiddlewareService.spec.ts b/src/services/middlewareServices/__tests__/AuthorizationMiddlewareService.spec.ts new file mode 100644 index 000000000..3c38e7191 --- /dev/null +++ b/src/services/middlewareServices/__tests__/AuthorizationMiddlewareService.spec.ts @@ -0,0 +1,139 @@ +import { + mockUserWithSiteSessionData, + mockIsomerUserId, + mockSessionDataEmailUserWithSite, + mockSiteName, +} from "@fixtures/sessionData" +import { CollaboratorRoles } from "@root/constants" +import { ForbiddenError } from "@root/errors/ForbiddenError" +import AuthService from "@root/services/identity/AuthService" +import CollaboratorsService from "@root/services/identity/CollaboratorsService" +import IsomerAdminsService from "@root/services/identity/IsomerAdminsService" +import UsersService from "@root/services/identity/UsersService" + +import AuthorizationMiddlewareService from "../AuthorizationMiddlewareService" + +describe("Authorization Middleware Service", () => { + const mockIdentityAuthService = { + hasAccessToSite: jest.fn(), + } + + const mockUsersService = { + hasAccessToSite: jest.fn(), + } + + const mockIsomerAdminsService = { + getByUserId: jest.fn(), + } + + const mockCollaboratorsService = { + getRole: jest.fn(), + } + + const service = new AuthorizationMiddlewareService({ + identityAuthService: (mockIdentityAuthService as unknown) as AuthService, + usersService: (mockUsersService as unknown) as UsersService, + isomerAdminsService: (mockIsomerAdminsService as unknown) as IsomerAdminsService, + collaboratorsService: (mockCollaboratorsService as unknown) as CollaboratorsService, + }) + + beforeEach(() => { + jest.clearAllMocks() + }) + + describe("checkIsSiteMember", () => { + it("Allows access for email users with site access", async () => { + // Arrange + mockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) + mockCollaboratorsService.getRole.mockImplementationOnce( + () => CollaboratorRoles.Contributor + ) + + // Act + const actual = await service.checkIsSiteMember( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual instanceof ForbiddenError).toBe(false) + expect(mockIdentityAuthService.hasAccessToSite).toHaveBeenCalledTimes(0) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + expect(mockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + }) + + it("Allows access for github users with site access", async () => { + // Arrange + mockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) + mockIdentityAuthService.hasAccessToSite.mockImplementationOnce(() => 
true) + + // Act + const actual = await service.checkIsSiteMember( + mockUserWithSiteSessionData + ) + + // Assert + expect(actual instanceof ForbiddenError).toBe(false) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledTimes(0) + expect(mockIdentityAuthService.hasAccessToSite).toHaveBeenCalledWith( + mockUserWithSiteSessionData + ) + expect(mockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + }) + + it("Allows access for admin users even without site access", async () => { + // Arrange + mockIsomerAdminsService.getByUserId.mockImplementationOnce( + () => "adminObj" + ) + mockCollaboratorsService.getRole.mockImplementationOnce( + () => CollaboratorRoles.Admin + ) + + // Act + const actual = await service.checkIsSiteMember( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual instanceof ForbiddenError).toBe(false) + expect(mockIdentityAuthService.hasAccessToSite).toHaveBeenCalledTimes(0) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + expect(mockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + }) + + it("Throws error for users without site access", async () => { + // Arrange + mockIsomerAdminsService.getByUserId.mockImplementationOnce(() => null) + mockCollaboratorsService.getRole.mockImplementationOnce(() => null) + + // Act + const actual = await service.checkIsSiteMember( + mockSessionDataEmailUserWithSite + ) + + // Assert + expect(actual) + expect(actual instanceof ForbiddenError).toBe(true) + expect(mockIdentityAuthService.hasAccessToSite).toHaveBeenCalledTimes(0) + expect(mockCollaboratorsService.getRole).toHaveBeenCalledWith( + mockSiteName, + mockIsomerUserId + ) + expect(mockIsomerAdminsService.getByUserId).toHaveBeenCalledWith( + mockIsomerUserId + ) + }) + }) +}) diff --git a/src/services/moverServices/MoverService.js b/src/services/moverServices/MoverService.js index 487f145b7..01b9ad95b 100644 --- a/src/services/moverServices/MoverService.js +++ b/src/services/moverServices/MoverService.js @@ -10,7 +10,7 @@ class MoverService { } async movePage( - reqDetails, + sessionData, { fileName, oldFileCollection, @@ -25,14 +25,14 @@ class MoverService { const { content: { frontMatter, pageBody }, sha, - } = await this.subcollectionPageService.read(reqDetails, { + } = await this.subcollectionPageService.read(sessionData, { fileName, collectionName: oldFileCollection, subcollectionName: oldFileSubcollection, }) fileFrontMatter = frontMatter fileBody = pageBody - await this.subcollectionPageService.delete(reqDetails, { + await this.subcollectionPageService.delete(sessionData, { fileName, collectionName: oldFileCollection, subcollectionName: oldFileSubcollection, @@ -42,13 +42,13 @@ class MoverService { const { content: { frontMatter, pageBody }, sha, - } = await this.collectionPageService.read(reqDetails, { + } = await this.collectionPageService.read(sessionData, { fileName, collectionName: oldFileCollection, }) fileFrontMatter = frontMatter fileBody = pageBody - await this.collectionPageService.delete(reqDetails, { + await this.collectionPageService.delete(sessionData, { fileName, collectionName: oldFileCollection, sha, @@ -57,17 +57,17 @@ class MoverService { const { content: { frontMatter, pageBody }, sha, - } = await this.unlinkedPageService.read(reqDetails, { + } = await this.unlinkedPageService.read(sessionData, { fileName, }) fileFrontMatter = frontMatter fileBody = pageBody - await this.unlinkedPageService.delete(reqDetails, { fileName, 
sha }) + await this.unlinkedPageService.delete(sessionData, { fileName, sha }) } let createResp if (newFileSubcollection) { - createResp = await this.subcollectionPageService.create(reqDetails, { + createResp = await this.subcollectionPageService.create(sessionData, { fileName, collectionName: newFileCollection, subcollectionName: newFileSubcollection, @@ -76,7 +76,7 @@ class MoverService { shouldIgnoreCheck: true, }) } else if (newFileCollection) { - createResp = await this.collectionPageService.create(reqDetails, { + createResp = await this.collectionPageService.create(sessionData, { fileName, collectionName: newFileCollection, content: fileBody, @@ -84,7 +84,7 @@ class MoverService { shouldIgnoreCheck: true, }) } else { - createResp = await this.unlinkedPageService.create(reqDetails, { + createResp = await this.unlinkedPageService.create(sessionData, { fileName, content: fileBody, frontMatter: fileFrontMatter, diff --git a/src/services/review/ReviewRequestService.ts b/src/services/review/ReviewRequestService.ts new file mode 100644 index 000000000..78c626bdd --- /dev/null +++ b/src/services/review/ReviewRequestService.ts @@ -0,0 +1,626 @@ +import _ from "lodash" +import { ModelStatic } from "sequelize" + +import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" + +import { Reviewer } from "@database/models/Reviewers" +import { ReviewMeta } from "@database/models/ReviewMeta" +import { ReviewRequest } from "@database/models/ReviewRequest" +import { ReviewRequestStatus } from "@root/constants" +import { ReviewRequestView } from "@root/database/models" +import { Site } from "@root/database/models/Site" +import { User } from "@root/database/models/User" +import RequestNotFoundError from "@root/errors/RequestNotFoundError" +import { + CommentItem, + DashboardReviewRequestDto, + EditedItemDto, + FileType, + GithubCommentData, + ReviewRequestDto, +} from "@root/types/dto/review" +import { isIsomerError } from "@root/types/error" +import { Commit, fromGithubCommitMessage } from "@root/types/github" +import { RequestChangeInfo } from "@root/types/review" +import * as ReviewApi from "@services/db/review" + +/** + * NOTE: This class does not belong as a subset of GitHub service. + * This is because GitHub service exists to operate on _files_ + * whereas this operates on pull requests. + * + * Perhaps we could rename Github service into GitHubFile service + * and this into GitHubPullRequest service to make the distinction obvious. + * + * Separately, this also allows us to add typings into this service. 
+ */ +export default class ReviewRequestService { + private readonly apiService: typeof ReviewApi + + private readonly repository: ModelStatic + + private readonly users: ModelStatic + + private readonly reviewers: ModelStatic + + private readonly reviewMeta: ModelStatic + + private readonly reviewRequestView: ModelStatic + + constructor( + apiService: typeof ReviewApi, + users: ModelStatic, + repository: ModelStatic, + reviewers: ModelStatic, + reviewMeta: ModelStatic, + reviewRequestView: ModelStatic + ) { + this.apiService = apiService + this.users = users + this.repository = repository + this.reviewers = reviewers + this.reviewMeta = reviewMeta + this.reviewRequestView = reviewRequestView + } + + compareDiff = async ( + sessionData: UserWithSiteSessionData + ): Promise => { + // Step 1: Get the site name + const { siteName } = sessionData + + // Step 2: Get the list of changed files using Github's API + // Refer here for details; https://docs.github.com/en/rest/commits/commits#compare-two-commits + // Note that we need a triple dot (...) between base and head refs + const { files, commits } = await this.apiService.getCommitDiff(siteName) + + const mappings = await this.computeShaMappings(commits) + + return files.map(({ filename, contents_url }) => { + const fullPath = filename.split("/") + const items = contents_url.split("?ref=") + // NOTE: We have to compute sha this way rather than + // taking the file sha. + // This is because the sha present on the file is + // a checksum of the files contents. + // And the actual commit sha is given by the ref param + const sha = items[items.length - 1] + + return { + type: this.computeFileType(filename), + // NOTE: The string is guaranteed to be non-empty + // and hence this should exist. + name: fullPath.pop() || "", + // NOTE: pop alters in place + path: fullPath, + url: this.computeFileUrl(filename, siteName), + lastEditedBy: mappings[sha]?.author || "Unknown user", + lastEditedTime: mappings[sha]?.unixTime || 0, + } + }) + } + + // TODO + computeFileType = (filename: string): FileType[] => ["page"] + + computeFileUrl = (filename: string, siteName: string) => "www.google.com" + + computeShaMappings = async ( + commits: Commit[] + ): Promise> => { + const mappings: Record = {} + + // NOTE: commits from github are capped at 300. + // This implies that there might possibly be some files + // whose commit isn't being returned. + await Promise.all( + commits.map(async ({ commit, sha }) => { + const { userId } = fromGithubCommitMessage(commit.message) + const author = await this.users.findByPk(userId) + const lastChangedTime = new Date(commit.author.date).getTime() + mappings[sha] = { + author: author?.email || commit.author.name, + unixTime: lastChangedTime, + } + }) + ) + return mappings + } + + computeCommentData = async ( + comments: GithubCommentData[], + viewedTime: Date | null + ) => { + const mappings = await Promise.all( + comments.map(async ({ userId, message, createdAt }) => { + const createdTime = new Date(createdAt) + const author = await this.users.findByPk(userId) + return { + user: author?.email || "", + message, + createdAt: createdTime.getTime(), + isRead: viewedTime ? 
createdTime < viewedTime : false, + } + }) + ) + return mappings + } + + createReviewRequest = async ( + sessionData: UserWithSiteSessionData, + reviewers: User[], + requestor: User, + site: Site, + title: string, + description?: string + ): Promise => { + const { siteName } = sessionData + // Step 1: Create an actual pull request on Github + // From head -> base + const pullRequestNumber = await this.apiService.createPullRequest( + siteName, + title, + description + ) + + // Step 2: Only update internal model state once PR is created + const reviewRequest = await this.repository.create({ + requestorId: requestor.id, + siteId: site.id, + }) + await Promise.all( + reviewers.map(({ id }) => + this.reviewers.create({ + requestId: reviewRequest.id, + reviewerId: id, + }) + ) + ) + + await this.reviewMeta.create({ + reviewId: reviewRequest.id, + pullRequestNumber, + reviewLink: `cms.isomer.gov.sg/sites/${siteName}/review/${pullRequestNumber}`, + }) + + return pullRequestNumber + } + + listReviewRequest = async ( + sessionData: UserWithSiteSessionData, + site: Site + ): Promise => { + const { siteName, isomerUserId: userId } = sessionData + + // Find all review requests associated with the site + const requests = await this.repository.findAll({ + where: { + siteId: site.id, + }, + include: [ + { + model: ReviewMeta, + as: "reviewMeta", + }, + { + model: User, + as: "requestor", + }, + ], + }) + + // NOTE: This has a max of 30 pull requests + // and returns only open pull requests. + return Promise.all( + requests.map(async (req) => { + const { pullRequestNumber } = req.reviewMeta + // NOTE: We explicitly destructure as the raw data + // contains ALOT more than these fields, which we want to + // discard to lower retrieval times for FE + const { + title, + body, + changed_files, + created_at, + } = await this.apiService.getPullRequest(siteName, pullRequestNumber) + + // It is the user's first view if the review request views table + // does not contain a record for the user and the review request + const isFirstView = !(await this.reviewRequestView.count({ + where: { + reviewRequestId: req.id, + siteId: site.id, + userId, + }, + })) + + // It is a new comment to the user if any of the following + // conditions satisfy: + // 1. The review request views table does not contain a record + // for the user and the review request. + // 2. The review request views table contains a record for that + // user and review request, but the lastViewedAt entry is NULL. + // 3. The review request views table contains a record in the + // lastViewedAt entry, and the comment has a timestamp greater + // than the one stored in the database. 
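// NOTE: A minimal sketch of the unread rule above, assuming `lastViewedAt` is the
// value stored in the review request views table (null when the user has never
// opened the review request); the predicate name is illustrative only:
//   const isCommentRead = (createdAt: Date, lastViewedAt: Date | null): boolean =>
//     lastViewedAt !== null && createdAt < lastViewedAt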
+ const allComments = await this.getComments( + sessionData, + site, + pullRequestNumber + ) + const countNewComments = await Promise.all( + allComments.map(async (value) => value.isRead) + ).then((arr) => { + const unreadComments = arr.filter((isRead) => !isRead) + return unreadComments.length + }) + + return { + id: pullRequestNumber, + author: req.requestor.email || "Unknown user", + status: req.reviewStatus, + title, + description: body || "", + changedFiles: changed_files, + createdAt: new Date(created_at).getTime(), + newComments: countNewComments, + firstView: isFirstView, + } + }) + ) + } + + markAllReviewRequestsAsViewed = async ( + sessionData: UserWithSiteSessionData, + site: Site + ): Promise => { + const { isomerUserId: userId } = sessionData + + const requestsViewed = await this.reviewRequestView.findAll({ + where: { + siteId: site.id, + userId, + }, + }) + + const allActiveRequests = await this.repository.findAll({ + where: { + siteId: site.id, + // NOTE: Closed and merged review requests would not have an + // entry in the review request views table + reviewStatus: ["OPEN", "APPROVED"], + }, + }) + + const requestIdsViewed = requestsViewed.map( + (request) => request.reviewRequestId + ) + const allActiveRequestIds = allActiveRequests.map((request) => request.id) + const requestIdsToMarkAsViewed = _.difference( + allActiveRequestIds, + requestIdsViewed + ) + + await Promise.all( + // Using map here to allow creations to be done concurrently + // But we do not actually need the result of the view creation + requestIdsToMarkAsViewed.map( + async (requestId) => + await this.reviewRequestView.create({ + reviewRequestId: requestId, + siteId: site.id, + userId, + // This field represents the user opening the review request + // itself, which the user has not done so yet at this stage. + lastViewedAt: null, + }) + ) + ) + } + + updateReviewRequestLastViewedAt = async ( + sessionData: UserWithSiteSessionData, + site: Site, + reviewRequest: ReviewRequest + ): Promise => { + const { isomerUserId: userId } = sessionData + const { id: reviewRequestId } = reviewRequest + + await this.reviewRequestView.upsert({ + reviewRequestId, + siteId: site.id, + userId, + lastViewedAt: new Date(), + }) + } + + markReviewRequestAsViewed = async ( + sessionData: UserWithSiteSessionData, + site: Site, + requestId: number + ): Promise => { + const { isomerUserId: userId } = sessionData + + const reviewRequestView = await this.reviewRequestView.findOne({ + where: { + siteId: site.id, + userId, + reviewRequestId: requestId, + }, + }) + + // We only want to create the entry if it does not exist + // (i.e. the review request has never been viewed before) + if (!reviewRequestView) { + await this.reviewRequestView.create({ + reviewRequestId: requestId, + siteId: site.id, + userId, + // This field represents the user opening the review request + // itself, which the user has not done so yet at this stage. 
+ lastViewedAt: null, + }) + } + } + + deleteAllReviewRequestViews = async ( + site: Site, + pullRequestNumber: number + ): Promise => { + const possibleReviewRequest = await this.getReviewRequest( + site, + pullRequestNumber + ) + + if (isIsomerError(possibleReviewRequest)) { + return possibleReviewRequest + } + + const { id: reviewRequestId } = possibleReviewRequest + + await this.reviewRequestView.destroy({ + where: { + reviewRequestId, + siteId: site.id, + }, + }) + } + + getReviewRequest = async (site: Site, pullRequestNumber: number) => { + const possibleReviewRequest = await this.repository.findOne({ + where: { + siteId: site.id, + }, + include: [ + { + model: ReviewMeta, + as: "reviewMeta", + where: { + pullRequestNumber, + }, + }, + { + model: User, + as: "requestor", + }, + { + model: User, + as: "reviewers", + }, + { + model: Site, + }, + ], + }) + + if (!possibleReviewRequest) { + return new RequestNotFoundError() + } + + return possibleReviewRequest + } + + getLatestMergedReviewRequest = async (site: Site) => { + const possibleReviewRequest = await this.repository.findOne({ + where: { + siteId: site.id, + reviewStatus: ReviewRequestStatus.Merged, + }, + include: [ + { + model: ReviewMeta, + as: "reviewMeta", + }, + { + model: User, + as: "requestor", + }, + { + model: User, + as: "reviewers", + }, + { + model: Site, + }, + ], + order: [ + [ + { + model: ReviewMeta, + as: "reviewMeta", + }, + "pullRequestNumber", + "DESC", + ], + ], + }) + + if (!possibleReviewRequest) { + return new RequestNotFoundError() + } + + return possibleReviewRequest + } + + getFullReviewRequest = async ( + userWithSiteSessionData: UserWithSiteSessionData, + site: Site, + pullRequestNumber: number + ): Promise => { + const { siteName } = userWithSiteSessionData + const review = await this.repository.findOne({ + where: { + siteId: site.id, + }, + include: [ + { + model: ReviewMeta, + as: "reviewMeta", + where: { + pullRequestNumber, + }, + }, + { + model: User, + as: "requestor", + }, + { + model: User, + as: "reviewers", + }, + { + model: Site, + }, + ], + }) + + // As the db stores github's PR # and (siteName, prNumber) + // should be a unique identifier for a specific review request, + // unable to find a RR with the tuple implies that said RR does not exist. + // This could happen when the user queries for an existing PR that is on github, + // but created prior to this feature rolling out. + if (!review) { + return new RequestNotFoundError() + } + + // NOTE: We explicitly destructure as the raw data + // contains ALOT more than these fields, which we want to + // discard to lower retrieval times for FE + const { title, created_at } = await this.apiService.getPullRequest( + siteName, + pullRequestNumber + ) + + const changedItems = await this.compareDiff(userWithSiteSessionData) + + return { + reviewUrl: review.reviewMeta.reviewLink, + title, + status: review.reviewStatus, + requestor: review.requestor.email || "", + reviewers: review.reviewers.map(({ email }) => email || ""), + reviewRequestedTime: new Date(created_at).getTime(), + changedItems, + } + } + + updateReviewRequest = async ( + reviewRequest: ReviewRequest, + { reviewers }: RequestChangeInfo + ) => { + // Update db state with new reviewers + await reviewRequest.$set("reviewers", reviewers) + await reviewRequest.save() + } + + // NOTE: The semantics of our reviewing system is slightly different from github. + // The approval is tied to the request, rather than the user. 
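// NOTE: Illustrative usage sketch (the `reviewRequestService` instance name is assumed):
// because approval state lives on the review request row rather than on individual
// reviewers, approving and later retracting the approval toggles the same request
// between APPROVED and OPEN:
//   await reviewRequestService.approveReviewRequest(reviewRequest)
//   await reviewRequestService.deleteReviewRequestApproval(reviewRequest)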
+ approveReviewRequest = async (reviewRequest: ReviewRequest) => { + reviewRequest.reviewStatus = ReviewRequestStatus.Approved + await reviewRequest.save() + } + + deleteReviewRequestApproval = async (reviewRequest: ReviewRequest) => { + reviewRequest.reviewStatus = ReviewRequestStatus.Open + await reviewRequest.save() + } + + closeReviewRequest = async (reviewRequest: ReviewRequest) => { + const siteName = reviewRequest.site.name + const { pullRequestNumber } = reviewRequest.reviewMeta + await this.apiService.closeReviewRequest(siteName, pullRequestNumber) + + reviewRequest.reviewStatus = ReviewRequestStatus.Closed + await reviewRequest.save() + } + + mergeReviewRequest = async ( + reviewRequest: ReviewRequest + ): Promise => { + const siteName = reviewRequest.site.name + const { pullRequestNumber } = reviewRequest.reviewMeta + + await this.apiService.approvePullRequest(siteName, pullRequestNumber) + await this.apiService.mergePullRequest(siteName, pullRequestNumber) + + reviewRequest.reviewStatus = ReviewRequestStatus.Merged + return reviewRequest.save() + } + + createComment = async ( + sessionData: UserWithSiteSessionData, + pullRequestNumber: number, + message: string + ) => { + const { siteName, isomerUserId } = sessionData + + return this.apiService.createComment( + siteName, + pullRequestNumber, + isomerUserId, + message + ) + } + + getComments = async ( + sessionData: UserWithSiteSessionData, + site: Site, + pullRequestNumber: number + ): Promise => { + const { siteName, isomerUserId: userId } = sessionData + + const comments = await this.apiService.getComments( + siteName, + pullRequestNumber + ) + + const requestsView = await this.reviewRequestView.findOne({ + where: { + siteId: site.id, + userId, + }, + include: [ + { + model: ReviewRequest, + required: true, + include: [ + { + model: ReviewMeta, + required: true, + where: { + pullRequestNumber, + }, + }, + ], + }, + ], + }) + + const viewedTime = requestsView ? 
new Date(requestsView.lastViewedAt) : null + + return this.computeCommentData(comments, viewedTime) + } + + getBlob = async (repo: string, path: string, ref: string): Promise => + this.apiService.getBlob(repo, path, ref) +} diff --git a/src/services/review/__tests__/ReviewRequestService.spec.ts b/src/services/review/__tests__/ReviewRequestService.spec.ts new file mode 100644 index 000000000..d168fb1e9 --- /dev/null +++ b/src/services/review/__tests__/ReviewRequestService.spec.ts @@ -0,0 +1,1151 @@ +import _ from "lodash" +import { Attributes, ModelStatic } from "sequelize" + +import { + ReviewRequest, + ReviewMeta, + ReviewRequestView, + User, + Reviewer, + Site, +} from "@database/models" +import { ReviewRequestStatus } from "@root/constants" +import RequestNotFoundError from "@root/errors/RequestNotFoundError" +import { + mockCollaboratorAdmin1, + mockCollaboratorAdmin2, + mockCollaboratorContributor1, + mockSiteOrmResponseWithAllCollaborators, + MOCK_COMMIT_FILEPATH_TWO, + MOCK_COMMIT_MESSAGE_ONE, + MOCK_COMMIT_MESSAGE_TWO, + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + MOCK_GITHUB_COMMENT_ONE, + MOCK_GITHUB_COMMENT_TWO, + MOCK_GITHUB_COMMIT_AUTHOR_ONE, + MOCK_GITHUB_COMMIT_AUTHOR_TWO, + MOCK_GITHUB_DATE_ONE, + MOCK_GITHUB_DATE_TWO, + MOCK_GITHUB_EMAIL_ADDRESS_ONE, + MOCK_GITHUB_EMAIL_ADDRESS_TWO, + MOCK_GITHUB_NAME_ONE, + MOCK_GITHUB_NAME_TWO, + MOCK_IDENTITY_EMAIL_ONE, + MOCK_IDENTITY_EMAIL_THREE, + MOCK_IDENTITY_EMAIL_TWO, +} from "@root/fixtures/identity" +import { + MOCK_PULL_REQUEST_COMMIT_ONE, + MOCK_PULL_REQUEST_COMMIT_TWO, + MOCK_PULL_REQUEST_FILECHANGEINFO_ONE, + MOCK_PULL_REQUEST_FILECHANGEINFO_TWO, + MOCK_PULL_REQUEST_FILE_FILENAME_ONE, + MOCK_PULL_REQUEST_FILE_FILENAME_TWO, + MOCK_PULL_REQUEST_ONE, + MOCK_REVIEW_REQUEST_ONE, + MOCK_REVIEW_REQUEST_VIEW_ONE, +} from "@root/fixtures/review" +import { mockUserWithSiteSessionData } from "@root/fixtures/sessionData" +import { EditedItemDto, GithubCommentData } from "@root/types/dto/review" +import { Commit } from "@root/types/github" +import * as ReviewApi from "@services/db/review" +import _ReviewRequestService from "@services/review/ReviewRequestService" + +const MockReviewApi = { + approvePullRequest: jest.fn(), + closeReviewRequest: jest.fn(), + createComment: jest.fn(), + createPullRequest: jest.fn(), + mergePullRequest: jest.fn(), + getComments: jest.fn(), + getCommitDiff: jest.fn(), + getPullRequest: jest.fn(), +} + +const MockUsersRepository = { + findByPk: jest.fn(), +} + +const MockReviewRequestRepository = { + create: jest.fn(), + findAll: jest.fn(), + findOne: jest.fn(), +} + +const MockReviewersRepository = { + create: jest.fn(), +} + +const MockReviewMetaRepository = { + create: jest.fn(), +} + +const MockReviewRequestViewRepository = { + count: jest.fn(), + create: jest.fn(), + destroy: jest.fn(), + findAll: jest.fn(), + findOne: jest.fn(), + upsert: jest.fn(), +} + +const MockReviewRequest = { + ...MOCK_REVIEW_REQUEST_ONE, + $set: jest.fn(), + save: jest.fn(), +} + +const ReviewRequestService = new _ReviewRequestService( + (MockReviewApi as unknown) as typeof ReviewApi, + (MockUsersRepository as unknown) as ModelStatic, + (MockReviewRequestRepository as unknown) as ModelStatic, + (MockReviewersRepository as unknown) as ModelStatic, + (MockReviewMetaRepository as unknown) as ModelStatic, + (MockReviewRequestViewRepository as unknown) as ModelStatic +) + +const SpyReviewRequestService = { + computeCommentData: jest.spyOn(ReviewRequestService, "computeCommentData"), + computeFileType: 
jest.spyOn(ReviewRequestService, "computeFileType"), + computeFileUrl: jest.spyOn(ReviewRequestService, "computeFileUrl"), + computeShaMappings: jest.spyOn(ReviewRequestService, "computeShaMappings"), + getComments: jest.spyOn(ReviewRequestService, "getComments"), + getReviewRequest: jest.spyOn(ReviewRequestService, "getReviewRequest"), +} + +describe("ReviewRequestService", () => { + // Prevent inter-test pollution of mocks + afterEach(() => jest.clearAllMocks()) + + describe("compareDiff", () => { + it("should return an array of edited item objects", async () => { + // Arrange + const mockCommitDiff = { + files: [ + MOCK_PULL_REQUEST_FILECHANGEINFO_ONE, + MOCK_PULL_REQUEST_FILECHANGEINFO_TWO, + ], + commits: [MOCK_PULL_REQUEST_COMMIT_ONE, MOCK_PULL_REQUEST_COMMIT_TWO], + } + const expected = [ + { + type: ["page"], + name: MOCK_PULL_REQUEST_FILE_FILENAME_ONE, + path: [], + url: "www.google.com", + lastEditedBy: MOCK_GITHUB_NAME_ONE, + lastEditedTime: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + }, + { + type: ["page"], + name: MOCK_PULL_REQUEST_FILE_FILENAME_TWO, + path: MOCK_COMMIT_FILEPATH_TWO.split("/"), + url: "www.google.com", + lastEditedBy: MOCK_GITHUB_NAME_TWO, + lastEditedTime: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + }, + ] + MockReviewApi.getCommitDiff.mockResolvedValueOnce(mockCommitDiff) + + // Act + const actual = await ReviewRequestService.compareDiff( + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect(SpyReviewRequestService.computeShaMappings).toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileType).toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileUrl).toHaveBeenCalled() + }) + + it("should return an empty array if there are no file changes or commits", async () => { + // Arrange + const mockCommitDiff = { + files: [], + commits: [], + } + const expected: EditedItemDto[] = [] + MockReviewApi.getCommitDiff.mockResolvedValueOnce(mockCommitDiff) + + // Act + const actual = await ReviewRequestService.compareDiff( + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect(SpyReviewRequestService.computeShaMappings).toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileType).not.toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileUrl).not.toHaveBeenCalled() + }) + + it("should return an empty array if there are no file changes only", async () => { + // Arrange + const mockCommitDiff = { + files: [], + commits: [MOCK_PULL_REQUEST_COMMIT_ONE, MOCK_PULL_REQUEST_COMMIT_TWO], + } + const expected: EditedItemDto[] = [] + MockReviewApi.getCommitDiff.mockResolvedValueOnce(mockCommitDiff) + + // Act + const actual = await ReviewRequestService.compareDiff( + mockUserWithSiteSessionData + ) + + // Assert + expect(actual).toEqual(expected) + expect(SpyReviewRequestService.computeShaMappings).toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileType).not.toHaveBeenCalled() + expect(SpyReviewRequestService.computeFileUrl).not.toHaveBeenCalled() + }) + }) + + describe("computeFileType", () => { + // TODO + it("should return the correct file type", () => { + // Arrange + const expected = ["page"] + + // Act + const actual = ReviewRequestService.computeFileType("filename") + + // Assert + expect(actual).toEqual(expected) + }) + }) + + describe("computeFileUrl", () => { + // TODO + it("should return the correct file URL", () => { + // Arrange + const expected = "www.google.com" + + // Act + const actual = ReviewRequestService.computeFileUrl("filename", "siteName") 
+ + // Assert + expect(actual).toEqual(expected) + }) + }) + + describe("computeShaMappings", () => { + it("should return the correct sha mappings for pure identity commits", async () => { + // Arrange + const mockCommits: Commit[] = [ + MOCK_PULL_REQUEST_COMMIT_ONE, + MOCK_PULL_REQUEST_COMMIT_TWO, + ] + const expected = { + [MOCK_PULL_REQUEST_COMMIT_ONE.sha]: { + author: MOCK_IDENTITY_EMAIL_ONE, + unixTime: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + }, + [MOCK_PULL_REQUEST_COMMIT_TWO.sha]: { + author: MOCK_IDENTITY_EMAIL_TWO, + unixTime: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + }, + } + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_ONE, + }) + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_TWO, + }) + + // Act + const actual = await ReviewRequestService.computeShaMappings(mockCommits) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return the correct sha mappings for non-identity commits", async () => { + // Arrange + const mockNonIdentityCommitOne = _.set( + _.clone(MOCK_PULL_REQUEST_COMMIT_ONE), + "commit.message", + MOCK_COMMIT_MESSAGE_ONE + ) + const mockNonIdentityCommitTwo = _.set( + _.clone(MOCK_PULL_REQUEST_COMMIT_TWO), + "commit.message", + MOCK_COMMIT_MESSAGE_TWO + ) + + const mockCommits: Commit[] = [ + mockNonIdentityCommitOne, + mockNonIdentityCommitTwo, + ] + const expected = { + [MOCK_PULL_REQUEST_COMMIT_ONE.sha]: { + author: MOCK_GITHUB_NAME_ONE, + unixTime: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + }, + [MOCK_PULL_REQUEST_COMMIT_TWO.sha]: { + author: MOCK_GITHUB_NAME_TWO, + unixTime: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + }, + } + MockUsersRepository.findByPk.mockResolvedValueOnce(null) + MockUsersRepository.findByPk.mockResolvedValueOnce(null) + + // Act + const actual = await ReviewRequestService.computeShaMappings(mockCommits) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + expect(MockUsersRepository.findByPk).toHaveBeenNthCalledWith(1, undefined) + expect(MockUsersRepository.findByPk).toHaveBeenNthCalledWith(2, undefined) + }) + + it("should return an empty object if there are no commits", async () => { + // Arrange + const mockCommits: Commit[] = [] + const expected = {} + + // Act + const actual = await ReviewRequestService.computeShaMappings(mockCommits) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).not.toHaveBeenCalled() + }) + }) + + describe("computeCommentData", () => { + it("should return the correct comment objects with a valid viewedTime", async () => { + // Arrange + const mockComments: GithubCommentData[] = [ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ] + const mockViewedTime = new Date("2022-09-23T00:00:00Z") + const expected = [ + { + user: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + message: MOCK_GITHUB_COMMENT_ONE, + createdAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + isRead: true, + }, + { + user: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + message: MOCK_GITHUB_COMMENT_TWO, + createdAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + isRead: false, + }, + ] + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_ONE + ) + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_TWO + ) + + // Act + const actual = await ReviewRequestService.computeCommentData( + mockComments, + mockViewedTime + ) + + // Assert + 
expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return the correct comment objects with viewedTime being null", async () => { + // Arrange + const mockComments: GithubCommentData[] = [ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ] + const expected = [ + { + user: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + message: MOCK_GITHUB_COMMENT_ONE, + createdAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + isRead: false, + }, + { + user: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + message: MOCK_GITHUB_COMMENT_TWO, + createdAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + isRead: false, + }, + ] + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_ONE + ) + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_TWO + ) + + // Act + const actual = await ReviewRequestService.computeCommentData( + mockComments, + null + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return empty email string if user is not found", async () => { + // Arrange + const mockComments: GithubCommentData[] = [ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ] + const expected = [ + { + user: "", + message: MOCK_GITHUB_COMMENT_ONE, + createdAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + isRead: false, + }, + { + user: "", + message: MOCK_GITHUB_COMMENT_TWO, + createdAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + isRead: false, + }, + ] + MockUsersRepository.findByPk.mockResolvedValueOnce(null) + MockUsersRepository.findByPk.mockResolvedValueOnce(null) + + // Act + const actual = await ReviewRequestService.computeCommentData( + mockComments, + null + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return empty array if there are no comments", async () => { + // Arrange + const mockComments: GithubCommentData[] = [] + + // Act + const actual = await ReviewRequestService.computeCommentData( + mockComments, + null + ) + + // Assert + expect(actual).toEqual([]) + expect(MockUsersRepository.findByPk).not.toHaveBeenCalled() + }) + }) + + describe("createReviewRequest", () => { + it("should create the review request successfully", async () => { + // Arrange + const mockReviewers = [mockCollaboratorAdmin1, mockCollaboratorAdmin2] + const mockRequestor = mockCollaboratorContributor1 + const mockSite = mockSiteOrmResponseWithAllCollaborators as Attributes + const mockTitle = "test title" + const mockDescription = "test description" + const mockPullRequestNumber = MOCK_REVIEW_REQUEST_ONE.id + const expected = mockPullRequestNumber + MockReviewApi.createPullRequest.mockResolvedValueOnce( + mockPullRequestNumber + ) + MockReviewRequestRepository.create.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_ONE + ) + MockReviewersRepository.create.mockResolvedValueOnce(undefined) + MockReviewersRepository.create.mockResolvedValueOnce(undefined) + MockReviewMetaRepository.create.mockResolvedValueOnce(undefined) + + // Act + const actual = await ReviewRequestService.createReviewRequest( + mockUserWithSiteSessionData, + mockReviewers, + mockRequestor, + mockSite, + mockTitle, + mockDescription + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewApi.createPullRequest).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + mockTitle, + mockDescription + ) + 
expect(MockReviewRequestRepository.create).toHaveBeenCalledWith({ + requestorId: mockRequestor.id, + siteId: mockSite.id, + }) + expect(MockReviewersRepository.create).toHaveBeenNthCalledWith(1, { + requestId: MOCK_REVIEW_REQUEST_ONE.id, + reviewerId: mockCollaboratorAdmin1.id, + }) + expect(MockReviewersRepository.create).toHaveBeenNthCalledWith(2, { + requestId: MOCK_REVIEW_REQUEST_ONE.id, + reviewerId: mockCollaboratorAdmin2.id, + }) + expect(MockReviewMetaRepository.create).toHaveBeenCalledWith({ + reviewId: MOCK_REVIEW_REQUEST_ONE.id, + pullRequestNumber: mockPullRequestNumber, + reviewLink: `cms.isomer.gov.sg/sites/${mockUserWithSiteSessionData.siteName}/review/${mockPullRequestNumber}`, + }) + }) + }) + + describe("listReviewRequest", () => { + // NOTE: We are only assuming one review request is returned + it("should return an array of basic review request objects not viewed before", async () => { + // Arrange + const expected = [ + { + id: MOCK_REVIEW_REQUEST_ONE.id, + author: MOCK_IDENTITY_EMAIL_ONE, + status: MOCK_REVIEW_REQUEST_ONE.reviewStatus, + title: MOCK_PULL_REQUEST_ONE.title, + description: MOCK_PULL_REQUEST_ONE.body, + changedFiles: MOCK_PULL_REQUEST_ONE.changed_files, + createdAt: new Date(MOCK_PULL_REQUEST_ONE.created_at).getTime(), + newComments: 2, + firstView: true, + }, + ] + MockReviewRequestRepository.findAll.mockResolvedValueOnce([ + MOCK_REVIEW_REQUEST_ONE, + ]) + MockReviewApi.getPullRequest.mockResolvedValueOnce(MOCK_PULL_REQUEST_ONE) + MockReviewRequestViewRepository.count.mockResolvedValueOnce(0) + MockReviewApi.getComments.mockResolvedValueOnce([ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ]) + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce(null) + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_ONE, + }) + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_TWO, + }) + + // Act + const actual = await ReviewRequestService.listReviewRequest( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewRequestRepository.findAll).toHaveBeenCalled() + expect(MockReviewApi.getPullRequest).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewRequestViewRepository.count).toHaveBeenCalled() + expect(SpyReviewRequestService.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewApi.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(SpyReviewRequestService.computeCommentData).toHaveBeenCalled() + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return an array of basic review request objects with a mix of read and unread comments", async () => { + // Arrange + const expected = [ + { + id: MOCK_REVIEW_REQUEST_ONE.id, + author: MOCK_IDENTITY_EMAIL_ONE, + status: MOCK_REVIEW_REQUEST_ONE.reviewStatus, + title: MOCK_PULL_REQUEST_ONE.title, + description: MOCK_PULL_REQUEST_ONE.body, + changedFiles: MOCK_PULL_REQUEST_ONE.changed_files, + createdAt: new Date(MOCK_PULL_REQUEST_ONE.created_at).getTime(), + newComments: 1, + firstView: false, + }, + ] + MockReviewRequestRepository.findAll.mockResolvedValueOnce([ + 
MOCK_REVIEW_REQUEST_ONE, + ]) + MockReviewApi.getPullRequest.mockResolvedValueOnce(MOCK_PULL_REQUEST_ONE) + MockReviewRequestViewRepository.count.mockResolvedValueOnce(1) + MockReviewApi.getComments.mockResolvedValueOnce([ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ]) + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_VIEW_ONE + ) + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_ONE, + }) + MockUsersRepository.findByPk.mockResolvedValueOnce({ + email: MOCK_IDENTITY_EMAIL_TWO, + }) + + // Act + const actual = await ReviewRequestService.listReviewRequest( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewRequestRepository.findAll).toHaveBeenCalled() + expect(MockReviewApi.getPullRequest).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewRequestViewRepository.count).toHaveBeenCalled() + expect(SpyReviewRequestService.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewApi.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(SpyReviewRequestService.computeCommentData).toHaveBeenCalled() + expect(MockUsersRepository.findByPk).toHaveBeenCalledTimes(2) + }) + + it("should return an array of basic review request objects with no comments", async () => { + // Arrange + const expected = [ + { + id: MOCK_REVIEW_REQUEST_ONE.id, + author: MOCK_IDENTITY_EMAIL_ONE, + status: MOCK_REVIEW_REQUEST_ONE.reviewStatus, + title: MOCK_PULL_REQUEST_ONE.title, + description: MOCK_PULL_REQUEST_ONE.body, + changedFiles: MOCK_PULL_REQUEST_ONE.changed_files, + createdAt: new Date(MOCK_PULL_REQUEST_ONE.created_at).getTime(), + newComments: 0, + firstView: false, + }, + ] + MockReviewRequestRepository.findAll.mockResolvedValueOnce([ + MOCK_REVIEW_REQUEST_ONE, + ]) + MockReviewApi.getPullRequest.mockResolvedValueOnce(MOCK_PULL_REQUEST_ONE) + MockReviewRequestViewRepository.count.mockResolvedValueOnce(1) + MockReviewApi.getComments.mockResolvedValueOnce([]) + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_VIEW_ONE + ) + + // Act + const actual = await ReviewRequestService.listReviewRequest( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewRequestRepository.findAll).toHaveBeenCalled() + expect(MockReviewApi.getPullRequest).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewRequestViewRepository.count).toHaveBeenCalled() + expect(SpyReviewRequestService.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(MockReviewApi.getComments).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.reviewMeta.pullRequestNumber + ) + expect(SpyReviewRequestService.computeCommentData).toHaveBeenCalled() + expect(MockUsersRepository.findByPk).not.toHaveBeenCalled() + }) + }) + + describe("markAllReviewRequestsAsViewed", 
() => { + it("should mark all review requests as viewed successfully", async () => { + // Arrange + MockReviewRequestViewRepository.findAll.mockResolvedValueOnce([]) + MockReviewRequestRepository.findAll.mockResolvedValueOnce([ + MOCK_REVIEW_REQUEST_ONE, + ]) + MockReviewRequestViewRepository.create.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.markAllReviewRequestsAsViewed( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(MockReviewRequestViewRepository.findAll).toHaveBeenCalled() + expect(MockReviewRequestRepository.findAll).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.create).toHaveBeenCalledWith({ + reviewRequestId: MOCK_REVIEW_REQUEST_ONE.id, + siteId: mockSiteOrmResponseWithAllCollaborators.id, + userId: mockUserWithSiteSessionData.isomerUserId, + lastViewedAt: null, + }) + }) + + it("should not mark any review request as viewed if they have already been viewed", async () => { + // Arrange + MockReviewRequestViewRepository.findAll.mockResolvedValueOnce([ + MOCK_REVIEW_REQUEST_VIEW_ONE, + ]) + MockReviewRequestRepository.findAll.mockResolvedValueOnce([ + MOCK_REVIEW_REQUEST_ONE, + ]) + MockReviewRequestViewRepository.create.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.markAllReviewRequestsAsViewed( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(MockReviewRequestViewRepository.findAll).toHaveBeenCalled() + expect(MockReviewRequestRepository.findAll).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.create).not.toHaveBeenCalled() + }) + }) + + describe("updateReviewRequestLastViewedAt", () => { + it("should insert/update the review request view entry", async () => { + // Arrange + MockReviewRequestViewRepository.upsert.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.updateReviewRequestLastViewedAt( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE + ) + + // Assert + expect(MockReviewRequestViewRepository.upsert).toHaveBeenCalledWith({ + reviewRequestId: MOCK_REVIEW_REQUEST_ONE.id, + siteId: mockSiteOrmResponseWithAllCollaborators.id, + userId: mockUserWithSiteSessionData.isomerUserId, + // NOTE: We can't use new Date() due to potential time lags + lastViewedAt: expect.any(Date), + }) + }) + }) + + describe("markReviewRequestAsViewed", () => { + it("should create a review request view entry if it does not already exist", async () => { + // Arrange + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce(null) + MockReviewRequestViewRepository.create.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.markReviewRequestAsViewed( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(MockReviewRequestViewRepository.findOne).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.create).toHaveBeenCalledWith({ + reviewRequestId: MOCK_REVIEW_REQUEST_ONE.id, + siteId: mockSiteOrmResponseWithAllCollaborators.id, + userId: mockUserWithSiteSessionData.isomerUserId, + lastViewedAt: null, + }) + }) + + it("should not do anything if the review request view entry already exists", async () => { + // Arrange + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_VIEW_ONE + ) + + // Act + await ReviewRequestService.markReviewRequestAsViewed( + 
mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(MockReviewRequestViewRepository.findOne).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.create).not.toHaveBeenCalled() + }) + }) + + describe("deleteAllReviewRequestViews", () => { + it("should delete all existing review request view entries successfully", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_ONE + ) + MockReviewRequestViewRepository.destroy.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.deleteAllReviewRequestViews( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(SpyReviewRequestService.getReviewRequest).toHaveBeenCalledWith( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.destroy).toHaveBeenCalledWith({ + where: { + reviewRequestId: MOCK_REVIEW_REQUEST_ONE.id, + siteId: mockSiteOrmResponseWithAllCollaborators.id, + }, + }) + }) + + it("should return an error if the review request is not found", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const actual = await ReviewRequestService.deleteAllReviewRequestViews( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toBeInstanceOf(RequestNotFoundError) + expect(SpyReviewRequestService.getReviewRequest).toHaveBeenCalledWith( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + expect(MockReviewRequestViewRepository.destroy).not.toHaveBeenCalled() + }) + }) + + describe("getReviewRequest", () => { + it("should return the review request object if it exists", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_ONE + ) + + // Act + const actual = await ReviewRequestService.getReviewRequest( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toEqual(MOCK_REVIEW_REQUEST_ONE) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + }) + + it("should return an error if the review request is not found", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const actual = await ReviewRequestService.getReviewRequest( + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toBeInstanceOf(RequestNotFoundError) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + }) + }) + + describe("getLatestMergedReviewRequest", () => { + it("should return the review request object if it exists", async () => { + // Arrange + const mockMergedReviewRequest = _.set( + _.clone(MOCK_REVIEW_REQUEST_ONE), + "reviewStatus", + ReviewRequestStatus.Merged + ) + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + mockMergedReviewRequest + ) + + // Act + const actual = await ReviewRequestService.getLatestMergedReviewRequest( + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(actual).toEqual(mockMergedReviewRequest) + 
expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + }) + + it("should return an error if the review request is not found", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + new RequestNotFoundError() + ) + + // Act + const actual = await ReviewRequestService.getLatestMergedReviewRequest( + mockSiteOrmResponseWithAllCollaborators as Attributes + ) + + // Assert + expect(actual).toBeInstanceOf(RequestNotFoundError) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + }) + }) + + describe("getFullReviewRequest", () => { + it("should return the full review request object successfully", async () => { + // Arrange + const mockCommitDiff = { + files: [], + commits: [], + } + const expected = { + reviewUrl: MOCK_REVIEW_REQUEST_ONE.reviewMeta.reviewLink, + title: MOCK_PULL_REQUEST_ONE.title, + status: MOCK_REVIEW_REQUEST_ONE.reviewStatus, + requestor: MOCK_IDENTITY_EMAIL_ONE, + reviewers: [MOCK_IDENTITY_EMAIL_TWO, MOCK_IDENTITY_EMAIL_THREE], + reviewRequestedTime: new Date( + MOCK_PULL_REQUEST_ONE.created_at + ).getTime(), + changedItems: [], + } + MockReviewRequestRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_ONE + ) + MockReviewApi.getPullRequest.mockResolvedValueOnce(MOCK_PULL_REQUEST_ONE) + MockReviewApi.getCommitDiff.mockResolvedValueOnce(mockCommitDiff) + + // Act + const actual = await ReviewRequestService.getFullReviewRequest( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + expect(MockReviewApi.getPullRequest).toHaveBeenCalled() + expect(MockReviewApi.getCommitDiff).toHaveBeenCalled() + }) + + it("should return an error if the review request is not found", async () => { + // Arrange + MockReviewRequestRepository.findOne.mockResolvedValueOnce(null) + + // Act + const actual = await ReviewRequestService.getFullReviewRequest( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toBeInstanceOf(RequestNotFoundError) + expect(MockReviewRequestRepository.findOne).toHaveBeenCalled() + expect(MockReviewApi.getPullRequest).not.toHaveBeenCalled() + expect(MockReviewApi.getCommitDiff).not.toHaveBeenCalled() + }) + }) + + describe("updateReviewRequest", () => { + it("should update the review request with the reviewers successfully", async () => { + // Arrange + const mockReviewers = [mockCollaboratorAdmin1, mockCollaboratorAdmin2] + + // Act + await ReviewRequestService.updateReviewRequest( + MockReviewRequest as Attributes, + { + reviewers: mockReviewers, + } + ) + + // Assert + expect(MockReviewRequest.$set).toHaveBeenCalledWith( + "reviewers", + mockReviewers + ) + expect(MockReviewRequest.save).toHaveBeenCalled() + }) + }) + + describe("approveReviewRequest", () => { + it("should update the status of the review request to approved successfully", async () => { + // Arrange + const mockReviewRequestOpen = _.clone(MockReviewRequest) + + // Act + await ReviewRequestService.approveReviewRequest(mockReviewRequestOpen) + + // Assert + expect(mockReviewRequestOpen.reviewStatus).toEqual( + ReviewRequestStatus.Approved + ) + expect(mockReviewRequestOpen.save).toHaveBeenCalled() + }) + }) + + describe("deleteReviewRequestApproval", () => { + it("should delete the review request approval successfully", async () => { + // Arrange + const 
mockReviewRequestApproved = _.set( + _.clone(MockReviewRequest), + "reviewStatus", + ReviewRequestStatus.Approved + ) + + // Act + await ReviewRequestService.deleteReviewRequestApproval( + mockReviewRequestApproved + ) + + // Assert + expect(mockReviewRequestApproved.reviewStatus).toEqual( + ReviewRequestStatus.Open + ) + expect(mockReviewRequestApproved.save).toHaveBeenCalled() + }) + }) + + describe("closeReviewRequest", () => { + it("should close the review request successfully", async () => { + // Arrange + const mockReviewRequestOpen = _.clone(MockReviewRequest) + MockReviewApi.closeReviewRequest.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.closeReviewRequest(mockReviewRequestOpen) + + // Assert + expect(MockReviewApi.closeReviewRequest).toHaveBeenCalled() + expect(mockReviewRequestOpen.reviewStatus).toEqual( + ReviewRequestStatus.Closed + ) + expect(mockReviewRequestOpen.save).toHaveBeenCalled() + }) + }) + + describe("mergeReviewRequest", () => { + it("should merge the review request successfully", async () => { + // Arrange + const mockReviewRequestOpen = _.clone(MockReviewRequest) + MockReviewApi.approvePullRequest.mockResolvedValueOnce(undefined) + MockReviewApi.mergePullRequest.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.mergeReviewRequest(mockReviewRequestOpen) + + // Assert + // NOTE: The mockReviewRequestOpen is modified in-place, so we need to + // check the attribute against the expected value + expect(mockReviewRequestOpen.reviewStatus).toEqual( + ReviewRequestStatus.Merged + ) + expect(MockReviewApi.approvePullRequest).toHaveBeenCalled() + expect(MockReviewApi.mergePullRequest).toHaveBeenCalled() + expect(mockReviewRequestOpen.save).toHaveBeenCalled() + }) + }) + + describe("createComment", () => { + it("should create a new comment successfully", async () => { + // Arrange + MockReviewApi.createComment.mockResolvedValueOnce(undefined) + + // Act + await ReviewRequestService.createComment( + mockUserWithSiteSessionData, + MOCK_REVIEW_REQUEST_ONE.id, + MOCK_GITHUB_COMMENT_ONE + ) + + // Assert + expect(MockReviewApi.createComment).toHaveBeenCalledWith( + mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.id, + mockUserWithSiteSessionData.isomerUserId, + MOCK_GITHUB_COMMENT_ONE + ) + }) + }) + + describe("getComments", () => { + it("should return an array of valid comment objects", async () => { + // Arrange + const mockComments: GithubCommentData[] = [ + MOCK_GITHUB_COMMENT_DATA_ONE, + MOCK_GITHUB_COMMENT_DATA_TWO, + ] + const expected = [ + { + user: MOCK_GITHUB_EMAIL_ADDRESS_ONE, + message: MOCK_GITHUB_COMMENT_ONE, + createdAt: new Date(MOCK_GITHUB_DATE_ONE).getTime(), + isRead: true, + }, + { + user: MOCK_GITHUB_EMAIL_ADDRESS_TWO, + message: MOCK_GITHUB_COMMENT_TWO, + createdAt: new Date(MOCK_GITHUB_DATE_TWO).getTime(), + isRead: false, + }, + ] + MockReviewApi.getComments.mockResolvedValueOnce(mockComments) + MockReviewRequestViewRepository.findOne.mockResolvedValueOnce( + MOCK_REVIEW_REQUEST_VIEW_ONE + ) + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_ONE + ) + MockUsersRepository.findByPk.mockResolvedValueOnce( + MOCK_GITHUB_COMMIT_AUTHOR_TWO + ) + + // Act + const actual = await ReviewRequestService.getComments( + mockUserWithSiteSessionData, + mockSiteOrmResponseWithAllCollaborators as Attributes, + MOCK_REVIEW_REQUEST_ONE.id + ) + + // Assert + expect(actual).toEqual(expected) + expect(MockReviewApi.getComments).toHaveBeenCalledWith( + 
mockUserWithSiteSessionData.siteName, + MOCK_REVIEW_REQUEST_ONE.id + ) + expect(MockReviewRequestViewRepository.findOne).toHaveBeenCalled() + expect(SpyReviewRequestService.computeCommentData).toHaveBeenCalledWith( + mockComments, + MOCK_REVIEW_REQUEST_VIEW_ONE.lastViewedAt + ) + }) + }) +}) diff --git a/src/services/utilServices/AuthService.js b/src/services/utilServices/AuthService.js index fe15ab1e9..d7592415e 100644 --- a/src/services/utilServices/AuthService.js +++ b/src/services/utilServices/AuthService.js @@ -2,6 +2,8 @@ const axios = require("axios") const queryString = require("query-string") const uuid = require("uuid/v4") +const { config } = require("@config/config") + // Import error types const { AuthError } = require("@errors/AuthError") const { ForbiddenError } = require("@errors/ForbiddenError") @@ -9,9 +11,18 @@ const { ForbiddenError } = require("@errors/ForbiddenError") const validateStatus = require("@utils/axios-utils") const jwtUtils = require("@utils/jwt-utils") +const { + E2E_ISOMER_ID, + E2E_TEST_CONTACT, + E2E_TEST_EMAIL, +} = require("@root/constants") +const { BadRequestError } = require("@root/errors/BadRequestError") const logger = require("@root/logger/logger") +const { isError } = require("@root/types") -const { CLIENT_ID, CLIENT_SECRET, REDIRECT_URI } = process.env +const CLIENT_ID = config.get("github.clientId") +const CLIENT_SECRET = config.get("github.clientSecret") +const REDIRECT_URI = config.get("github.redirectUri") class AuthService { constructor({ usersService }) { @@ -27,7 +38,7 @@ class AuthService { return { redirectUrl: githubAuthUrl, cookieToken: token } } - async getGithubAuthToken({ csrfState, code, state }) { + async getUserInfoFromGithubAuth({ csrfState, code, state }) { try { const decoded = jwtUtils.verifyToken(csrfState) if (decoded.state !== state) { @@ -80,27 +91,64 @@ class AuthService { const user = await this.usersService.login(githubId) if (!user) throw Error("Failed to create user") - const token = jwtUtils.signToken({ - access_token: jwtUtils.encryptToken(accessToken), - user_id: githubId, - isomer_user_id: user.id, - }) + const userInfo = { + accessToken: jwtUtils.encryptToken(accessToken), + githubId, + isomerUserId: user.id, + email: user.email, + } + + return userInfo + } - return token + async sendOtp(email) { + const isValidEmail = await this.usersService.canSendEmailOtp(email) + if (!isValidEmail) + throw new AuthError( + "Please sign in with a gov.sg or other whitelisted email." + ) + try { + await this.usersService.sendEmailOtp(email) + } catch (err) { + if (isError(err)) { + logger.error(err.message) + throw new BadRequestError(err.message) + } else { + // If we encountered something that isn't an error but still ends up in the error branch, + // log this to cloudwatch with the relevant details + logger.error( + `Encountered unknown error type: ${err} when sendEmailOtp with email: ${email}` + ) + } + } } - async getUserInfo({ accessToken }) { - // Make a call to github - const endpoint = "https://api.github.com/user" + async verifyOtp({ email, otp }) { + const isOtpValid = await this.usersService.verifyEmailOtp(email, otp) + if (!isOtpValid) { + throw new BadRequestError("You have entered an invalid OTP.") + } + // Create user if does not exists. Set last logged in to current time. 
+ const user = await this.usersService.loginWithEmail(email) + const userInfo = { + isomerUserId: user.id, + email: user.email, + } + return userInfo + } + + async getUserInfo(sessionData) { try { - const resp = await axios.get(endpoint, { - headers: { - Authorization: `token ${accessToken}`, - "Content-Type": "application/json", - }, - }) - const userId = resp.data.login + if (sessionData.isomerUserId === E2E_ISOMER_ID) { + return { email: E2E_TEST_EMAIL, contactNumber: E2E_TEST_CONTACT } + } + if (sessionData.isEmailUser()) { + const { email } = sessionData + const { contactNumber } = await this.usersService.findByEmail(email) + return { email, contactNumber } + } + const { githubId: userId } = sessionData const { email, contactNumber } = await this.usersService.findByGitHubId( userId diff --git a/src/services/utilServices/MailClient.ts b/src/services/utilServices/MailClient.ts index 50c519c93..99f8ab1cc 100644 --- a/src/services/utilServices/MailClient.ts +++ b/src/services/utilServices/MailClient.ts @@ -1,5 +1,7 @@ import axios from "axios" +import { config } from "@config/config" + import logger from "@logger/logger" const POSTMAN_API_URL = "https://api.postman.gov.sg/v1" @@ -32,27 +34,20 @@ class MailClient { }, }) } catch (err) { - logger.error(err) + logger.error(`Error occurred when sending email to ${recipient}: ${err}`) throw new Error("Failed to send email.") } } } export default MailClient -const { NODE_ENV, POSTMAN_API_KEY } = process.env - -const IS_LOCAL_DEV = NODE_ENV === "LOCAL_DEV" +const NODE_ENV = config.get("env") +const POSTMAN_API_KEY = config.get("postman.apiKey") -if (!POSTMAN_API_KEY && !IS_LOCAL_DEV) { - throw new Error( - "Please ensure that you have set POSTMAN_API_KEY in your env vars and that you have sourced them!" - ) -} +const IS_DEV = NODE_ENV === "dev" const mockMailer = { sendMail: (email: string, subject: string, html: string) => logger.info(`Mock email sent to <${email}>, subject: ${subject}\n${html}`), } as MailClient -export const mailer = IS_LOCAL_DEV - ? mockMailer - : new MailClient(POSTMAN_API_KEY!) +export const mailer = IS_DEV ? mockMailer : new MailClient(POSTMAN_API_KEY) diff --git a/src/services/utilServices/RateLimiter.ts b/src/services/utilServices/RateLimiter.ts new file mode 100644 index 000000000..e8caf981a --- /dev/null +++ b/src/services/utilServices/RateLimiter.ts @@ -0,0 +1,20 @@ +import rateLimit from "express-rate-limit" + +import { config } from "@config/config" + +const DEFAULT_AUTH_TOKEN_EXPIRY_MILLISECONDS = 900000 + +// NOTE: Refer here for more information regarding the implementation: +// https://github.com/express-rate-limit/express-rate-limit +// Also, note that our production environment has 2 instances +// and the rate limiter uses an in memory store, +// so our effective limit is 100 * 2. +// This also implies that a client can hit the limit on 1 server +// but not on the other, leading to inconsistent behaviour. 
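To make the caveat in the note above concrete, here is a minimal, hedged sketch of how an `express-rate-limit` limiter is typically wired up (the route path and limits are illustrative, not this project's actual configuration); the limiter that this file actually exports follows next in the diff. Because the default `MemoryStore` lives inside a single process, every instance counts requests independently; a shared store (for example a Redis-backed store adapter) would be needed for all instances to enforce one combined limit.

```ts
import express from "express"
import rateLimit from "express-rate-limit"

// Illustrative limiter: a 15-minute window expressed in milliseconds,
// since express-rate-limit's `windowMs` option takes milliseconds.
const authLimiter = rateLimit({
  windowMs: 15 * 60 * 1000,
  max: 100, // per IP, per window, and (with the default MemoryStore) per process
  standardHeaders: true, // send the `RateLimit-*` headers
  legacyHeaders: false, // omit the `X-RateLimit-*` headers
})

const app = express()
// Scope the limiter to the routes that need protection (path is hypothetical).
app.use("/v2/auth", authLimiter)

// With two instances behind a load balancer, each keeps its own MemoryStore,
// so a client can make up to 100 requests against each instance (~200 total)
// and may see inconsistent 429 responses depending on where it lands.
app.listen(8081)
```

The spec added later in this diff relies on the same per-process behaviour: it constructs its own limiter with a limit of 1 so that requests made by other tests cannot consume its quota.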
+// eslint-disable-next-line import/prefer-default-export +export const rateLimiter = rateLimit({ + windowMs: config.get("auth.tokenExpiry"), // already in milliseconds + max: 100, // Limit each IP to 100 requests per `window` (here, per 15 minutes) + standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers + legacyHeaders: false, // Disable the `X-RateLimit-*` headers +}) diff --git a/src/services/utilServices/SitesService.js b/src/services/utilServices/SitesService.js deleted file mode 100644 index 731c0a463..000000000 --- a/src/services/utilServices/SitesService.js +++ /dev/null @@ -1,120 +0,0 @@ -const Bluebird = require("bluebird") -const _ = require("lodash") - -const { NotFoundError } = require("@root/errors/NotFoundError") -const { - genericGitHubAxiosInstance, -} = require("@root/services/api/AxiosInstance") - -const GH_MAX_REPO_COUNT = 100 -const ISOMERPAGES_REPO_PAGE_COUNT = - parseInt(process.env.ISOMERPAGES_REPO_PAGE_COUNT) || 3 -const ISOMER_GITHUB_ORG_NAME = process.env.GITHUB_ORG_NAME -const ISOMER_ADMIN_REPOS = [ - "isomercms-backend", - "isomercms-frontend", - "isomer-redirection", - "isomerpages-template", - "isomer-conversion-scripts", - "isomer-wysiwyg", - "isomer-slackbot", - "isomer-tooling", - "generate-site", - "travisci-scripts", - "recommender-train", - "editor", - "ci-test", - "infra", - "markdown-helper", -] - -class SitesService { - constructor({ gitHubService, configYmlService }) { - this.githubService = gitHubService - this.configYmlService = configYmlService - } - - async getSites({ accessToken }) { - const endpoint = `https://api.github.com/orgs/${ISOMER_GITHUB_ORG_NAME}/repos` - - // Simultaneously retrieve all isomerpages repos - const paramsArr = _.fill(Array(ISOMERPAGES_REPO_PAGE_COUNT), null).map( - (_, idx) => ({ - per_page: GH_MAX_REPO_COUNT, - sort: "full_name", - page: idx + 1, - }) - ) - - const sites = await Bluebird.map(paramsArr, async (params) => { - const { data: respData } = await genericGitHubAxiosInstance.get( - endpoint, - { - params, - headers: { - Authorization: `token ${accessToken}`, - }, - } - ) - - return respData - .map((repoData) => { - const { - pushed_at: updatedAt, - permissions, - name, - private: isPrivate, - } = repoData - - return { - lastUpdated: updatedAt, - permissions, - repoName: name, - isPrivate, - } - }) - .filter( - (repoData) => - repoData.permissions.push === true && - !ISOMER_ADMIN_REPOS.includes(repoData.repoName) - ) - }) - - return _.flatten(sites) - } - - async checkHasAccess(reqDetails, { userId }) { - await this.githubService.checkHasAccess(reqDetails, { userId }) - } - - async getLastUpdated(reqDetails) { - const { pushed_at: updatedAt } = await this.githubService.getRepoInfo( - reqDetails - ) - return updatedAt - } - - async getStagingUrl(reqDetails) { - // Check config.yml for staging url if it exists, and github site description otherwise - const { content: configData } = await this.configYmlService.read(reqDetails) - if ("staging" in configData) return configData.staging - - const { description } = await this.githubService.getRepoInfo(reqDetails) - - if (description) { - // Retrieve the url from the description - repo descriptions have varying formats, so we look for the first link - const descTokens = description.replace("/;/g", " ").split(" ") - // Staging urls also contain staging in their url - const stagingUrl = descTokens.find( - (token) => token.includes("http") && token.includes("staging") - ) - if (stagingUrl) return stagingUrl - } - - throw new NotFoundError(`${reqDetails.siteName}
has no staging url`) - } -} - -module.exports = { - SitesService, -} diff --git a/src/services/utilServices/__tests__/AuthService.spec.js b/src/services/utilServices/__tests__/AuthService.spec.js index f4cb468ea..6de53c960 100644 --- a/src/services/utilServices/__tests__/AuthService.spec.js +++ b/src/services/utilServices/__tests__/AuthService.spec.js @@ -8,25 +8,44 @@ jest.mock("@utils/jwt-utils") const axios = require("axios") const uuid = require("uuid/v4") +const { config } = require("@config/config") + +const { AuthError } = require("@errors/AuthError") +const { BadRequestError } = require("@errors/BadRequestError") + +const validateStatus = require("@utils/axios-utils") const jwtUtils = require("@utils/jwt-utils") -const validateStatus = require("@root/utils/axios-utils") +const { + mockUserWithSiteSessionData, + mockGithubId, + mockEmail, + mockIsomerUserId, + mockGithubId: mockUserId, + mockSessionDataEmailUser, +} = require("@fixtures/sessionData") +const { OtpType } = require("@root/services/identity/UsersService") const { AuthService } = require("@services/utilServices/AuthService") describe("Auth Service", () => { - const { CLIENT_ID, CLIENT_SECRET, REDIRECT_URI } = process.env + const CLIENT_ID = config.get("github.clientId") + const CLIENT_SECRET = config.get("github.clientSecret") + const REDIRECT_URI = config.get("github.redirectUri") const accessToken = "test-token" const state = "state" const token = "token" - const signedToken = "signedToken" + const signedGithubToken = { + accessToken: token, + githubId: mockGithubId, + } + const signedEmailToken = { + email: mockEmail, + isomerUserId: mockIsomerUserId, + } const csrfState = "csrfState" - const userId = "user" - const mockEmail = "email" const mockContactNumber = "12345678" - const mockIsomerUserId = "isomer-user" - const mockUserId = "user" const mockUsersService = { login: jest.fn().mockImplementation(() => mockIsomerUserId), @@ -34,6 +53,15 @@ describe("Auth Service", () => { email: mockEmail, contactNumber: mockContactNumber, })), + findByEmail: jest + .fn() + .mockImplementation(() => ({ contactNumber: mockContactNumber })), + canSendEmailOtp: jest.fn(), + sendEmailOtp: jest.fn(), + verifyEmailOtp: jest.fn(), + loginWithEmail: jest + .fn() + .mockImplementation(() => ({ id: mockIsomerUserId, email: mockEmail })), } const service = new AuthService({ usersService: mockUsersService }) @@ -53,7 +81,7 @@ describe("Auth Service", () => { }) }) - describe("getGithubAuthToken", () => { + describe("getUserInfoFromGithubAuth", () => { it("Retrieves the Github auth token", async () => { const params = { code: "code", @@ -64,7 +92,6 @@ describe("Auth Service", () => { uuid.mockImplementation(() => state) jwtUtils.verifyToken.mockImplementation(() => ({ state })) jwtUtils.encryptToken.mockImplementation(() => token) - jwtUtils.signToken.mockImplementation(() => signedToken) axios.post.mockImplementation(() => ({ data: `access_token=${accessToken}`, })) @@ -75,8 +102,8 @@ describe("Auth Service", () => { })) await expect( - service.getGithubAuthToken({ csrfState, code: "code", state }) - ).resolves.toEqual(signedToken) + service.getUserInfoFromGithubAuth({ csrfState, code: "code", state }) + ).resolves.toEqual(signedGithubToken) expect(axios.post).toHaveBeenCalledWith( "https://github.com/login/oauth/access_token", @@ -100,19 +127,77 @@ describe("Auth Service", () => { }) }) + describe("sendOtp", () => { + it("should be able to send otp for whitelisted users", async () => { + 
mockUsersService.canSendEmailOtp.mockImplementationOnce(() => true) + + await expect(service.sendOtp(mockEmail)).resolves.not.toThrow() + expect(mockUsersService.canSendEmailOtp).toHaveBeenCalledWith(mockEmail) + expect(mockUsersService.sendEmailOtp).toHaveBeenCalledWith(mockEmail) + }) + + it("should throw an error for non-whitelisted users", async () => { + mockUsersService.canSendEmailOtp.mockImplementationOnce(() => false) + + await expect(service.sendOtp(mockEmail)).rejects.toThrow(AuthError) + expect(mockUsersService.canSendEmailOtp).toHaveBeenCalledWith(mockEmail) + }) + }) + + describe("verifyOtp", () => { + const mockOtp = "123456" + it("should be able to verify otp, login, and return token if correct", async () => { + mockUsersService.verifyEmailOtp.mockImplementationOnce(() => true) + jwtUtils.signToken.mockImplementationOnce(() => signedEmailToken) + + await expect( + service.verifyOtp({ email: mockEmail, otp: mockOtp }) + ).resolves.toEqual(signedEmailToken) + expect(mockUsersService.verifyEmailOtp).toHaveBeenCalledWith( + mockEmail, + mockOtp + ) + expect(mockUsersService.loginWithEmail).toHaveBeenCalledWith(mockEmail) + }) + + it("should throw an error if otp is incorrect", async () => { + mockUsersService.verifyEmailOtp.mockImplementationOnce(() => false) + + await expect( + service.verifyOtp({ email: mockEmail, otp: mockOtp }) + ).rejects.toThrow(BadRequestError) + expect(mockUsersService.verifyEmailOtp).toHaveBeenCalledWith( + mockEmail, + mockOtp + ) + }) + }) + describe("getUserInfo", () => { - it("should be able to retrieve user info", async () => { + it("should be able to retrieve user info for github users", async () => { axios.get.mockImplementation(() => ({ data: { - login: userId, + login: mockGithubId, }, })) - await expect(service.getUserInfo({ accessToken })).resolves.toEqual({ - userId, + await expect( + service.getUserInfo(mockUserWithSiteSessionData) + ).resolves.toEqual({ + userId: mockGithubId, + email: mockEmail, + contactNumber: mockContactNumber, + }) + expect(mockUsersService.findByGitHubId).toHaveBeenCalledWith(mockGithubId) + }) + + it("should be able to retrieve user info for email users", async () => { + await expect( + service.getUserInfo(mockSessionDataEmailUser) + ).resolves.toEqual({ email: mockEmail, contactNumber: mockContactNumber, }) - expect(mockUsersService.findByGitHubId).toHaveBeenCalledWith(userId) + expect(mockUsersService.findByEmail).toHaveBeenCalledWith(mockEmail) }) }) }) diff --git a/src/services/utilServices/__tests__/MailClient.spec.ts b/src/services/utilServices/__tests__/MailClient.spec.ts index 658853730..624d8ea95 100644 --- a/src/services/utilServices/__tests__/MailClient.spec.ts +++ b/src/services/utilServices/__tests__/MailClient.spec.ts @@ -1,5 +1,7 @@ import mockAxios from "jest-mock-axios" +import { config } from "@config/config" + import { mockRecipient, mockSubject, @@ -10,7 +12,7 @@ import _MailClient from "@services/utilServices/MailClient" const mockEndpoint = "https://api.postman.gov.sg/v1/transactional/email/send" -const MailClient = new _MailClient(process.env.POSTMAN_API_KEY!) 
+const MailClient = new _MailClient(config.get("postman.apiKey")) const generateEmail = (recipient: string, subject: string, body: string) => ({ subject, diff --git a/src/services/utilServices/__tests__/RateLimiter.spec.ts b/src/services/utilServices/__tests__/RateLimiter.spec.ts new file mode 100644 index 000000000..a301e2e49 --- /dev/null +++ b/src/services/utilServices/__tests__/RateLimiter.spec.ts @@ -0,0 +1,31 @@ +import express from "express" +import rateLimit from "express-rate-limit" +import request from "supertest" + +describe("rate limiting", () => { + // NOTE: There is a need to initialise another rate limiter + // as the rate limit library uses an in-memory store for each instance. + // This means that the requests made in another test would also impact the rate limit. + const mockRateLimiter = rateLimit({ + windowMs: 15 * 60 * 1000, + max: 1, + }) + const rateLimitedRouter = express() + rateLimitedRouter.use(mockRateLimiter) + rateLimitedRouter.get("/test", (req, res) => { + res.status(200).send() + }) + + it("should allow all the requests through when the number of requests made is below the limit of 1", async () => { + // Act + assert + await request(rateLimitedRouter).get("/test").expect(200) + }) + + it("should disallow the 101th request made within the 15 minute window", async () => { + // Act + const resp = await request(rateLimitedRouter).get(`/test`).expect(429) + + // Assert + expect(resp.text).toBe("Too many requests, please try again later.") + }) +}) diff --git a/src/services/utilServices/__tests__/SitesService.spec.js b/src/services/utilServices/__tests__/SitesService.spec.js deleted file mode 100644 index 58e45a776..000000000 --- a/src/services/utilServices/__tests__/SitesService.spec.js +++ /dev/null @@ -1,149 +0,0 @@ -const { NotFoundError } = require("@errors/NotFoundError") - -const { - repoInfo, - repoInfo2, - adminRepo, - noAccessRepo, -} = require("@fixtures/repoInfo") -const { - genericGitHubAxiosInstance, -} = require("@root/services/api/AxiosInstance") - -describe("Resource Page Service", () => { - const siteName = "test-site" - const accessToken = "test-token" - const userId = "userId" - - const reqDetails = { siteName, accessToken } - - const mockGithubService = { - checkHasAccess: jest.fn(), - getRepoInfo: jest.fn(), - } - - const mockConfigYmlService = { - read: jest.fn(), - } - - const { SitesService } = require("@services/utilServices/SitesService") - const service = new SitesService({ - gitHubService: mockGithubService, - configYmlService: mockConfigYmlService, - }) - - beforeEach(() => { - jest.clearAllMocks() - }) - - describe("getSites", () => { - it("Filters accessible sites correctly", async () => { - // Store the API key and set it later so that other tests are not affected - const currRepoCount = process.env.ISOMERPAGES_REPO_PAGE_COUNT - process.env.ISOMERPAGES_REPO_PAGE_COUNT = 3 - - const expectedResp = [ - { - lastUpdated: repoInfo.pushed_at, - permissions: repoInfo.permissions, - repoName: repoInfo.name, - isPrivate: repoInfo.private, - }, - { - lastUpdated: repoInfo2.pushed_at, - permissions: repoInfo2.permissions, - repoName: repoInfo2.name, - isPrivate: repoInfo2.private, - }, - ] - genericGitHubAxiosInstance.get.mockImplementationOnce(() => ({ - data: [repoInfo, repoInfo2, adminRepo, noAccessRepo], - })) - genericGitHubAxiosInstance.get.mockImplementationOnce(() => ({ - data: [], - })) - genericGitHubAxiosInstance.get.mockImplementationOnce(() => ({ - data: [], - })) - - await expect(service.getSites({ accessToken 
})).resolves.toMatchObject( - expectedResp - ) - - expect(genericGitHubAxiosInstance.get).toHaveBeenCalledTimes(3) - process.env.ISOMERPAGES_REPO_PAGE_COUNT = currRepoCount - expect(process.env.ISOMERPAGES_REPO_PAGE_COUNT).toBe(currRepoCount) - }) - }) - - describe("checkHasAccess", () => { - it("Checks if a user has access to a site", async () => { - await expect( - service.checkHasAccess(reqDetails, { userId }) - ).resolves.not.toThrow() - - expect(mockGithubService.checkHasAccess).toHaveBeenCalledWith( - reqDetails, - { userId } - ) - }) - }) - - describe("getLastUpdated", () => { - it("Checks when site was last updated", async () => { - mockGithubService.getRepoInfo.mockResolvedValue(repoInfo) - - await expect(service.getLastUpdated(reqDetails)).resolves.toEqual( - repoInfo.pushed_at - ) - - expect(mockGithubService.getRepoInfo).toHaveBeenCalledWith(reqDetails) - }) - }) - - describe("getStagingUrl", () => { - const stagingUrl = "https://repo-staging.netlify.app" - it("Retrieves the staging url for a site from config if available with higher priority over the description", async () => { - mockConfigYmlService.read.mockResolvedValue({ - content: { - staging: stagingUrl, - }, - }) - mockGithubService.getRepoInfo.mockResolvedValue(repoInfo2) - - await expect(service.getStagingUrl(reqDetails)).resolves.toEqual( - stagingUrl - ) - - expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) - }) - it("Retrieves the staging url for a site from repo info otherwise", async () => { - mockConfigYmlService.read.mockResolvedValue({ - content: {}, - }) - mockGithubService.getRepoInfo.mockResolvedValue(repoInfo) - - await expect(service.getStagingUrl(reqDetails)).resolves.toEqual( - stagingUrl - ) - - expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) - expect(mockGithubService.getRepoInfo).toHaveBeenCalledWith(reqDetails) - }) - it("throws an error when the staging url for a repo is not found", async () => { - mockConfigYmlService.read.mockResolvedValue({ - content: {}, - }) - mockGithubService.getRepoInfo.mockResolvedValue({ - description: "edited description", - }) - - await expect(service.getStagingUrl(reqDetails)).rejects.toThrowError( - NotFoundError - ) - - expect(mockConfigYmlService.read).toHaveBeenCalledWith(reqDetails) - expect(mockGithubService.getRepoInfo).toHaveBeenCalledWith(reqDetails) - }) - }) -}) diff --git a/src/tests/database.ts b/src/tests/database.ts index 6030e85f9..08bd04eeb 100644 --- a/src/tests/database.ts +++ b/src/tests/database.ts @@ -8,9 +8,16 @@ import { Whitelist, AccessToken, Repo, + Otp, Deployment, Launch, Redirection, + IsomerAdmin, + Notification, + ReviewRequest, + ReviewMeta, + Reviewer, + ReviewRequestView, } from "@database/models" const sequelize = new Sequelize({ @@ -24,9 +31,16 @@ sequelize.addModels([ Whitelist, AccessToken, Repo, + Otp, Deployment, Launch, Redirection, + IsomerAdmin, + Notification, + ReviewRequest, + ReviewMeta, + Reviewer, + ReviewRequestView, ]) // eslint-disable-next-line import/prefer-default-export diff --git a/src/tests/teardown.ts b/src/tests/teardown.ts index 480bcd8f4..34c1379e9 100644 --- a/src/tests/teardown.ts +++ b/src/tests/teardown.ts @@ -4,6 +4,37 @@ const teardownDb = async () => { console.log("tearing down database tables") await sequelize.getQueryInterface().dropAllTables() await sequelize.getQueryInterface().dropAllEnums() + + /** + * We currently depend on `sequelize.dropAllTables();` during our test database clean up. 
+ * + * However, there appears to be a bug with Sequelize - + * or at least Sequelize has made some assumptions + * about the enum names for tables with multi-part column names. + * + * Specifically, when Sequelize attempts to drop the sites table, + * it assumes that the enum names for the `site_status` and `job_status` columns are + * `enum_sites_siteStatus` and `enum_sites_jobStatus` respectively. + * + * Unfortunately, we have named our enums `enum_sites_site_status` and + * `enum_sites_job_status` instead. + * + * Hence, we need to add the following SQL statements to ensure that the abovementioned + * enums are dropped properly. + * + * 'DROP TYPE IF EXISTS "public"."enum_sites_site_status" CASCADE;' + * 'DROP TYPE IF EXISTS "public"."enum_sites_job_status" CASCADE;' + */ + + await sequelize.query( + 'DROP TYPE IF EXISTS "public"."enum_sites_site_status" CASCADE;' + ) + await sequelize.query( + 'DROP TYPE IF EXISTS "public"."enum_sites_job_status" CASCADE;' + ) + await sequelize.query( + 'DROP TYPE IF EXISTS "public"."enum_review_requests_review_status" CASCADE;' + ) await sequelize.close() console.log("done, exiting...") } diff --git a/src/types/commitData.ts b/src/types/commitData.ts new file mode 100644 index 000000000..be07cc1a5 --- /dev/null +++ b/src/types/commitData.ts @@ -0,0 +1,8 @@ +export type GitHubCommitData = { + author: { + name: string + email: string + date: string + } + message: string +} diff --git a/src/types/configYml.ts b/src/types/configYml.ts new file mode 100644 index 000000000..3f48cfa76 --- /dev/null +++ b/src/types/configYml.ts @@ -0,0 +1,4 @@ +export type ConfigYmlData = { + staging?: string + prod?: string +} diff --git a/src/types/dto/error.ts b/src/types/dto/error.ts new file mode 100644 index 000000000..9ffe2c556 --- /dev/null +++ b/src/types/dto/error.ts @@ -0,0 +1,3 @@ +export interface ResponseErrorBody { + message: string +} diff --git a/src/types/dto/review.ts b/src/types/dto/review.ts new file mode 100644 index 000000000..4fba8a4cb --- /dev/null +++ b/src/types/dto/review.ts @@ -0,0 +1,65 @@ +import { CollaboratorRoles } from "@constants/constants" + +export type ReviewRequestStatus = "OPEN" | "APPROVED" | "MERGED" | "CLOSED" + +export type FileType = "page" | "nav" | "setting" | "file" | "image" + +export interface EditedItemDto { + type: FileType[] + name: string + path: string[] + url: string + lastEditedBy: string + lastEditedTime: number +} + +export interface UserDto { + email: string + role: CollaboratorRoles + id: string + lastLoggedIn: string +} + +export type DashboardReviewRequestDto = { + id: number + title: string + description: string + author: string + status: ReviewRequestStatus + changedFiles: number + newComments: number + firstView: boolean + createdAt: number // Unix timestamp +} + +export interface ReviewRequestDto { + reviewUrl: string + title: string + requestor: string + reviewers: string[] + reviewRequestedTime: number + status: ReviewRequestStatus + changedItems: EditedItemDto[] +} + +export interface UpdateReviewRequestDto { + reviewers: string[] +} + +export interface CommentItem { + user: string + createdAt: number + message: string + isRead: boolean +} + +export interface GithubCommentData { + userId: string + message: string + createdAt: string +} + +export interface BlobDiffDto { + oldValue: string + newValue: string +} diff --git a/src/types/error.ts b/src/types/error.ts index 86de13942..7fe1e9c9b 100644 --- a/src/types/error.ts +++ b/src/types/error.ts @@ -1,2 +1,7 @@ +import { BaseIsomerError } from 
"@root/errors/BaseError" + // eslint-disable-next-line import/prefer-default-export export const isError = (e: unknown): e is Error => e instanceof Error + +export const isIsomerError = (e: unknown): e is BaseIsomerError => + isError(e) && !!(e as BaseIsomerError).isIsomerError diff --git a/src/types/express/session.d.ts b/src/types/express/session.d.ts new file mode 100644 index 000000000..a77f0d2b8 --- /dev/null +++ b/src/types/express/session.d.ts @@ -0,0 +1,8 @@ +export interface SessionData { + userInfo: { + accessToken?: string + githubId?: string + isomerUserId: string + email: string + } +} diff --git a/src/types/github.ts b/src/types/github.ts new file mode 100644 index 000000000..1f0c57038 --- /dev/null +++ b/src/types/github.ts @@ -0,0 +1,88 @@ +// NOTE: Types here are with reference to: +// https://docs.github.com/en/rest/commits/commits#compare-two-commits + +export type FileChangeStatus = + | "added" + | "removed" + | "modified" + | "renamed" + | "copied" + | "changed" + | "unchanged" + +export interface Author { + name: string + email: string + date: string +} + +export interface RawCommit { + url: string + author: Author + // NOTE: message is assumed to have a JSON structure with + // the field `email` existing. + // Moreover, this field is assumed to point to the + // author of the commit. + message: string +} + +export interface Commit { + url: string + sha: string + commit: RawCommit +} + +export interface RawFileChangeInfo { + sha: string + filename: string + status: FileChangeStatus + additions: number + deletions: number + changes: number + // eslint-disable-next-line camelcase + blob_url: string + // eslint-disable-next-line camelcase + raw_url: string + // eslint-disable-next-line camelcase + contents_url: string +} + +export interface IsomerCommitMessage { + message: string + fileName: string + userId: string +} + +/** + * NOTE: Properties can be undefined and caller should validate/give sane default. + * + * This should happen as our current format is not backward compat + * as this implies we rewrite all existing commit messages to have this format. + * We should instead default to the one existing on Github. 
+ */ +export const fromGithubCommitMessage = ( + message: string +): Partial => { + try { + const parsed = JSON.parse(message) + return { + message: parsed.message, + fileName: parsed.filename, + userId: parsed.userId, + } + } catch { + return {} + } +} + +export interface RawPullRequest { + title: string + body: string + changed_files: number + created_at: string +} + +export interface RawComment { + body: string + created_at: string +} diff --git a/src/types/repoInfo.ts b/src/types/repoInfo.ts new file mode 100644 index 000000000..1bd9e3ec6 --- /dev/null +++ b/src/types/repoInfo.ts @@ -0,0 +1,23 @@ +export type GitHubRepositoryData = { + name: string + private: boolean + description: string + pushed_at: string + permissions: { + admin: boolean + maintain: boolean + push: boolean + triage: boolean + pull: boolean + } +} + +export type RepositoryData = { + lastUpdated: GitHubRepositoryData["pushed_at"] + permissions: GitHubRepositoryData["permissions"] + repoName: GitHubRepositoryData["name"] + isPrivate: GitHubRepositoryData["private"] +} + +type SiteUrlTypes = "staging" | "prod" +export type SiteUrls = { [key in SiteUrlTypes]: string } diff --git a/src/types/request.ts b/src/types/request.ts index 66fd1feb3..22d34e903 100644 --- a/src/types/request.ts +++ b/src/types/request.ts @@ -5,5 +5,5 @@ export type RequestHandler< ResBody = unknown, ReqBody = unknown, ReqQuery = unknown, - Locals = Record + Locals extends Record = Record > = ExpressHandler diff --git a/src/types/review.ts b/src/types/review.ts new file mode 100644 index 000000000..cd78f0112 --- /dev/null +++ b/src/types/review.ts @@ -0,0 +1,15 @@ +import type { User } from "@root/database/models/User" + +import { RawFileChangeInfo } from "./github" + +export interface FileChangeInfo + extends Pick< + RawFileChangeInfo, + "additions" | "deletions" | "changes" | "status" | "filename" + > { + rawUrl: string +} + +export interface RequestChangeInfo { + reviewers: User[] +} diff --git a/src/types/siteInfo.ts b/src/types/siteInfo.ts new file mode 100644 index 000000000..c0dadba6f --- /dev/null +++ b/src/types/siteInfo.ts @@ -0,0 +1,8 @@ +export type SiteInfo = { + savedAt: number + savedBy: string + publishedAt: number + publishedBy: string + stagingUrl: string + siteUrl: string +} diff --git a/src/utils/__tests__/markdown-utils.spec.ts b/src/utils/__tests__/markdown-utils.spec.ts new file mode 100644 index 000000000..c8ad8e7d7 --- /dev/null +++ b/src/utils/__tests__/markdown-utils.spec.ts @@ -0,0 +1,39 @@ +import { + retrieveDataFromMarkdown, + convertDataToMarkdown, +} from "@utils/markdown-utils" + +import { + normalMarkdownContent, + maliciousMarkdownContent, + normalJsonObject, + maliciousJsonObject, +} from "@fixtures/markdown-fixtures" + +describe("Sanitized markdown utils test", () => { + it("should parse normal markdown content into an object successfully", () => { + expect(retrieveDataFromMarkdown(normalMarkdownContent)).toStrictEqual( + normalJsonObject + ) + }) + + it("should parse malicious markdown content into a sanitized object successfully", () => { + expect(retrieveDataFromMarkdown(maliciousMarkdownContent)).toStrictEqual( + normalJsonObject + ) + }) + + it("should stringify a normal JSON object into markdown content successfully", () => { + const { frontMatter, pageContent } = normalJsonObject + expect(convertDataToMarkdown(frontMatter, pageContent)).toBe( + normalMarkdownContent + ) + }) + + it("should stringify a malicious JSON object into sanitized markdown content successfully", () => { + const { frontMatter, 
pageContent } = maliciousJsonObject + expect(convertDataToMarkdown(frontMatter, pageContent)).toBe( + normalMarkdownContent + ) + }) +}) diff --git a/src/utils/__tests__/yaml-utils.spec.ts b/src/utils/__tests__/yaml-utils.spec.ts new file mode 100644 index 000000000..472099fcd --- /dev/null +++ b/src/utils/__tests__/yaml-utils.spec.ts @@ -0,0 +1,28 @@ +import { sanitizedYamlParse, sanitizedYamlStringify } from "@utils/yaml-utils" + +import { + normalYamlString, + maliciousYamlString, + normalYamlObject, + maliciousYamlObject, +} from "@fixtures/yaml-fixtures" + +describe("Sanitized yaml utils test", () => { + it("should parse a normal string into yaml content successfully", () => { + expect(sanitizedYamlParse(normalYamlString)).toStrictEqual(normalYamlObject) + }) + + it("should parse a malicious string into sanitized yaml content successfully", () => { + expect(sanitizedYamlParse(maliciousYamlString)).toStrictEqual( + normalYamlObject + ) + }) + + it("should stringify normal yaml content into a string successfully", () => { + expect(sanitizedYamlStringify(normalYamlObject)).toBe(normalYamlString) + }) + + it("should stringify malicious yaml content into a string successfully", () => { + expect(sanitizedYamlStringify(maliciousYamlObject)).toBe(normalYamlString) + }) +}) diff --git a/src/utils/auth-utils.js b/src/utils/auth-utils.js index 7b3cc8451..460823425 100644 --- a/src/utils/auth-utils.js +++ b/src/utils/auth-utils.js @@ -1,10 +1,8 @@ -function isSecure() { - return ( - process.env.NODE_ENV !== "DEV" && - process.env.NODE_ENV !== "LOCAL_DEV" && - process.env.NODE_ENV !== "test" - ) -} +const { config } = require("@config/config") + +const NODE_ENV = config.get("env") + +const isSecure = NODE_ENV !== "dev" && NODE_ENV !== "test" module.exports = { isSecure, diff --git a/src/utils/file-upload-utils.js b/src/utils/file-upload-utils.js index dbee488b2..55de9278d 100644 --- a/src/utils/file-upload-utils.js +++ b/src/utils/file-upload-utils.js @@ -1,10 +1,15 @@ -const createDOMPurify = require("dompurify") +import { config } from "@config/config" + +import logger from "@logger/logger" + +const CloudmersiveVirusApiClient = require("cloudmersive-virus-api-client") const FileType = require("file-type") const isSvg = require("is-svg") -const { JSDOM } = require("jsdom") +const DOMPurify = require("isomorphic-dompurify") + +const { BaseIsomerError } = require("@errors/BaseError") -const { window } = new JSDOM("") -const DOMPurify = createDOMPurify(window) +const CLOUDMERSIVE_API_KEY = config.get("cloudmersiveKey") const ALLOWED_FILE_EXTENSIONS = [ "pdf", @@ -15,9 +20,31 @@ const ALLOWED_FILE_EXTENSIONS = [ "bmp", "ico", ] +const defaultCloudmersiveClient = CloudmersiveVirusApiClient.ApiClient.instance + +// Configure API key authorization: Apikey +const apikey = defaultCloudmersiveClient.authentications.Apikey +apikey.apiKey = CLOUDMERSIVE_API_KEY + +const apiInstance = new CloudmersiveVirusApiClient.ScanApi() + +const scanFileForVirus = (fileBuffer) => + new Promise((success, failure) => { + apiInstance.scanFile(fileBuffer, (error, data) => { + if (error) { + logger.error( + `Error when calling Cloudmersive Virus Scan API: ${error.message}` + ) + failure(error) + } else { + logger.info("Cloudmersive Virus Scan API called successfully") + success(data) + } + }) + }) const validateAndSanitizeFileUpload = async (data) => { - const [schema, content] = data.split(",") + const [, content] = data.split(",") const fileBuffer = Buffer.from(content, "base64") const detectedFileType = await 
FileType.fromBuffer(fileBuffer) @@ -35,4 +62,4 @@ const validateAndSanitizeFileUpload = async (data) => { return undefined } -module.exports = { validateAndSanitizeFileUpload, ALLOWED_FILE_EXTENSIONS } +module.exports = { validateAndSanitizeFileUpload, scanFileForVirus, ALLOWED_FILE_EXTENSIONS } diff --git a/src/utils/jwt-utils.js b/src/utils/jwt-utils.js index ef4a079e6..f1107a56c 100644 --- a/src/utils/jwt-utils.js +++ b/src/utils/jwt-utils.js @@ -3,9 +3,11 @@ const AES = require("crypto-js/aes") const jwt = require("jsonwebtoken") const _ = require("lodash") -const { JWT_SECRET } = process.env -const { ENCRYPTION_SECRET } = process.env -const AUTH_TOKEN_EXPIRY_MS = process.env.AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS.toString() +const { config } = require("@config/config") + +const JWT_SECRET = config.get("auth.jwtSecret") +const ENCRYPTION_SECRET = config.get("auth.encryptionSecret") +const AUTH_TOKEN_EXPIRY_MS = config.get("auth.tokenExpiry").toString() const jwtUtil = { decodeToken: _.wrap(jwt.decode, (decode, token) => decode(token)), diff --git a/src/utils/markdown-utils.js b/src/utils/markdown-utils.js index 5f1e8cdae..1b1d317d2 100644 --- a/src/utils/markdown-utils.js +++ b/src/utils/markdown-utils.js @@ -1,14 +1,21 @@ +const DOMPurify = require("isomorphic-dompurify") const _ = require("lodash") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const getTrailingSlashWithPermalink = (permalink) => permalink.endsWith("/") ? permalink : `${permalink}/` const retrieveDataFromMarkdown = (fileContent) => { // eslint-disable-next-line no-unused-vars - const [unused, encodedFrontMatter, ...pageContent] = fileContent.split("---") - const frontMatter = yaml.parse(encodedFrontMatter) - return { frontMatter, pageContent: pageContent.join("---") } + const [unused, encodedFrontMatter, ...pageContent] = DOMPurify.sanitize( + fileContent + ).split("---") + const frontMatter = sanitizedYamlParse(encodedFrontMatter) + return { frontMatter, pageContent: pageContent.join("---").trim() } } const isResourceFileOrLink = (frontMatter) => { @@ -25,9 +32,10 @@ const convertDataToMarkdown = (originalFrontMatter, pageContent) => { if (permalink) { frontMatter.permalink = getTrailingSlashWithPermalink(permalink) } - const newFrontMatter = yaml.stringify(frontMatter) + const newFrontMatter = sanitizedYamlStringify(frontMatter) const newContent = ["---\n", newFrontMatter, "---\n", pageContent].join("") - return newContent + + return DOMPurify.sanitize(newContent) } module.exports = { diff --git a/src/utils/mutex-utils.js b/src/utils/mutex-utils.js index 3874542d0..80ac2a0de 100644 --- a/src/utils/mutex-utils.js +++ b/src/utils/mutex-utils.js @@ -1,13 +1,17 @@ const AWS = require("aws-sdk") const { serializeError } = require("serialize-error") +const { config } = require("@config/config") + const logger = require("@logger/logger") const { ConflictError } = require("@errors/ConflictError") // Env vars -const { NODE_ENV, MUTEX_TABLE_NAME } = process.env -const IS_LOCAL_DEV = NODE_ENV === "LOCAL_DEV" || NODE_ENV === "test" +const NODE_ENV = config.get("env") +const MUTEX_TABLE_NAME = config.get("mutexTableName") + +const IS_DEV = NODE_ENV === "dev" || NODE_ENV === "test" || NODE_ENV === "vapt" const mockMutexObj = {} // Dynamodb constants @@ -29,7 +33,7 @@ const lock = async (siteName) => { const ONE_MIN_FROM_CURR_DATE_IN_SECONDS_FROM_EPOCH_TIME = Math.floor(new Date().valueOf() / 1000) + 60 - if (!IS_LOCAL_DEV) { + if (!IS_DEV) { const 
params = { TableName: MUTEX_TABLE_NAME, Item: { @@ -55,7 +59,7 @@ const lock = async (siteName) => { } const unlock = async (siteName) => { - if (IS_LOCAL_DEV) return mockUnlock(siteName) + if (IS_DEV) return mockUnlock(siteName) try { const params = { diff --git a/src/utils/notification-utils.ts b/src/utils/notification-utils.ts new file mode 100644 index 000000000..43924b6d2 --- /dev/null +++ b/src/utils/notification-utils.ts @@ -0,0 +1,49 @@ +import moment from "moment" + +export type NotificationType = + | "sent_request" + | "updated_request" + | "request_created" + | "request_approved" + | "request_cancelled" + +export const getNotificationExpiryDate = ( + notificationType: NotificationType +) => { + switch (notificationType) { + case "request_created": + case "request_approved": + case "request_cancelled": + // Always notify for review request information + return moment() + default: + return moment().subtract(3, "hours") + } +} + +export const getNotificationMessage = ( + notificationType: NotificationType, + sourceUsername: string +) => { + switch (notificationType) { + case "sent_request": + return `${sourceUsername} has sent you a review request.` + case "request_created": + return `${sourceUsername} created a review request.` + case "request_approved": + return `${sourceUsername} has approved a review request.` + case "request_cancelled": + return `${sourceUsername} has cancelled a review request.` + case "updated_request": + return `${sourceUsername} made changes to a review request.` + default: + return "Default notification" + } +} + +export const getNotificationPriority = (notificationType: NotificationType) => { + switch (notificationType) { + default: + return 2 + } +} diff --git a/src/utils/time-utils.ts b/src/utils/time-utils.ts new file mode 100644 index 000000000..8a0ab7a8a --- /dev/null +++ b/src/utils/time-utils.ts @@ -0,0 +1,2 @@ +export const milliSecondsToMinutes = (millis: number) => + Math.floor(millis / 60000) diff --git a/src/utils/utils.js b/src/utils/utils.js index 80cc2ff04..ee6fb5643 100644 --- a/src/utils/utils.js +++ b/src/utils/utils.js @@ -1,8 +1,10 @@ const slugify = require("slugify") +const { config } = require("@config/config") + const { genericGitHubAxiosInstance } = require("@services/api/AxiosInstance") -const { GITHUB_ORG_NAME } = process.env +const GITHUB_ORG_NAME = config.get("github.orgName") async function getCommitAndTreeSha(repo, accessToken, branchRef = "staging") { const headers = { diff --git a/src/utils/yaml-utils.ts b/src/utils/yaml-utils.ts new file mode 100644 index 000000000..55f2da3ba --- /dev/null +++ b/src/utils/yaml-utils.ts @@ -0,0 +1,11 @@ +import DOMPurify from "isomorphic-dompurify" +import yaml from "yaml" + +// Note: `yaml.parse()` and `yaml.stringify()` should not be used anywhere +// else in the codebase. +export const sanitizedYamlParse = ( + unparsedContent: string +): Record => yaml.parse(DOMPurify.sanitize(unparsedContent)) + +export const sanitizedYamlStringify = (prestringifiedContent: object): string => + DOMPurify.sanitize(yaml.stringify(prestringifiedContent)) diff --git a/tsconfig.json b/tsconfig.json index 7b086ea58..c8415484f 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -24,6 +24,7 @@ "@classes/*": ["./classes/*"], "@errors/*": ["./errors/*"], "@logger/*": ["./logger/*"], + "@config/*": ["./config/*"], "@middleware/*": ["./middleware/*"], "@routes/*": ["./routes/*"], "@utils/*": ["./utils/*"],