diff --git a/.env-example b/.env-example index fd7a6b36a..8469df1a0 100644 --- a/.env-example +++ b/.env-example @@ -1,9 +1,10 @@ export CLIENT_ID="" export CLIENT_SECRET="" -export REDIRECT_URI="http://localhost:8081/auth" +export REDIRECT_URI="http://localhost:8081/v1/auth" export NODE_ENV="LOCAL_DEV" export COOKIE_DOMAIN="localhost" export AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS=3600000 +export SESSION_SECRET=mysessionsecretblah export JWT_SECRET=mysecretblah export ENCRYPTION_SECRET=anothersecretblah export FRONTEND_URL='http://localhost:8081' @@ -11,6 +12,8 @@ export GITHUB_ORG_NAME="isomerpages" export GITHUB_BUILD_ORG_NAME="opengovsg" export GITHUB_BUILD_REPO_NAME="isomer-build" export MUTEX_TABLE_NAME="" +export MAX_NUM_OTP_ATTEMPTS=5 +export OTP_EXPIRY=900000 # GitHub access token to create repo export SYSTEM_GITHUB_TOKEN="" @@ -48,3 +51,9 @@ export POSTMAN_SMS_CRED_NAME="" export DD_ENV="local" export DD_SERVICE="isomer" export DD_TAGS="service:isomer" + +# Cloudmersive +export CLOUDMERSIVE_API_KEY="" + +# GitGuardian +export GITGUARDIAN_API_KEY="" \ No newline at end of file diff --git a/.env.test b/.env.test index 473c9b6e6..c1b6d5fec 100644 --- a/.env.test +++ b/.env.test @@ -11,6 +11,8 @@ export GITHUB_ORG_NAME="isomerpages" export GITHUB_BUILD_ORG_NAME="opengovsg" export GITHUB_BUILD_REPO_NAME="isomer-build" export ISOMERPAGES_REPO_PAGE_COUNT=3 +export MAX_NUM_OTP_ATTEMPTS=5 +export OTP_EXPIRY=900000 # Database export DB_URI="postgres://isomer:password@localhost:54321/isomercms_test" diff --git a/.gitignore b/.gitignore index e31fe77b0..c53d70f7e 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ build/ .DS_Store .step-functions-local/ .serverless/ +.cache_ggshield diff --git a/.husky/pre-commit b/.husky/pre-commit index 36af21989..edcec8ef0 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -2,3 +2,4 @@ . "$(dirname "$0")/_/husky.sh" npx lint-staged +source .env && ggshield secret scan pre-commit diff --git a/README.md b/README.md index a9fde9e22..c33393293 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,37 @@ 3. next, run `npm i` to ensure that you have all the required packages 4. run `npm run dev` +## Setup + +1. Ensure pre-commit hooks are set up for safe commits. See the section below on "Setting Up Git Guardian" +2. Ensure node 14 is installed. Install and use nvm to manage multiple node versions. +3. Run `npm i` to install required packages +4. Ensure [Docker](https://www.docker.com/products/docker-desktop/) is installed +5. Run `npm run dev:services` to bring up the docker containers +6. Run `npm run dev` to start the server + +## Setting Up Git Guardian + +1. Install GitGuardian + +``` +brew install gitguardian/tap/ggshield +``` + +2.
Add the API key to your `.env` file + +``` +# Service API key from GitGuardian account +export GITGUARDIAN_API_KEY=abc123 +``` + +Notes: + +Only if necessary, + +- To skip all pre-commit hooks, use `$ git commit -m "commit message" -n` +- To skip only GitGuardian's hook, use `$ SKIP=ggshield git commit -m "commit message"` + ## E2E Tests To run the E2E tests successfully, you will need to define the following environment variables: diff --git a/package-lock.json b/package-lock.json index f7092c9c0..45ab31963 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3975,10 +3975,9 @@ } }, "@mapbox/node-pre-gyp": { - "version": "1.0.9", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.9.tgz", - "integrity": "sha512-aDF3S3rK9Q2gey/WAttUlISduDItz5BU3306M9Eyv6/oS40aMprnopshtlKTykxRNIBEZuRMaZAnbrQ4QtKGyw==", - "dev": true, + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz", + "integrity": "sha512-4ySo4CjzStuprMwk35H5pPbkymjv1SF3jGLj6rAHp/xT/RF7TL7bd9CTm1xDY49K2qF7jmR/g7k+SkLETP6opA==", "requires": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", @@ -3994,14 +3993,12 @@ "detect-libc": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.1.tgz", - "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==", - "dev": true + "integrity": "sha512-463v3ZeIrcWtdgIg6vI6XUncguvr2TnGl4SzDXinkt9mSLpBJKXT3mW6xT3VQdDN11+WVs29pgvivTc4Lp8v+w==" }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, "requires": { "yallist": "^4.0.0" } }, "semver": { "version": "7.3.7", "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz", "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==", - "dev": true, "requires": { "lru-cache": "^6.0.0" } }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, @@ -4821,6 +4816,15 @@ "@babel/types": "^7.3.0" } }, + "@types/bcrypt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-5.0.0.tgz", + "integrity": "sha512-agtcFKaruL8TmcvqbndlqHPSJgsolhf/qPWchFlgnW1gECTN/nKbFcoFnvKAQRFfKbh+BO6A3SWdJu9t+xF3Lw==", + "dev": true, + "requires": { + "@types/node": "*" + } + }, "@types/body-parser": { "version": "1.19.2", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.2.tgz", @@ -4875,6 +4879,14 @@ "@types/ms": "*" } }, + "@types/dompurify": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/@types/dompurify/-/dompurify-2.4.0.tgz", + "integrity": "sha512-IDBwO5IZhrKvHFUl+clZxgf3hn2b/lU6H1KaBShPkQyGJUQ0xwebezIPSuiyGwfz1UzJWQl4M7BDxtHtCCPlTg==", + "requires": { + "@types/trusted-types": "*" + } + }, "@types/express": { "version": "4.17.13", "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", @@ -4898,6 +4910,15 @@ "@types/range-parser": "*" } }, + "@types/express-session": { + "version": "1.17.5", + "resolved":
"https://registry.npmjs.org/@types/express-session/-/express-session-1.17.5.tgz", + "integrity": "sha512-l0DhkvNVfyUPEEis8fcwbd46VptfA/jmMwHfob2TfDMf3HyPLiB9mKD71LXhz5TMUobODXPD27zXSwtFQLHm+w==", + "dev": true, + "requires": { + "@types/express": "*" + } + }, "@types/glob": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", @@ -5091,6 +5112,11 @@ "@types/superagent": "*" } }, + "@types/trusted-types": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@types/trusted-types/-/trusted-types-2.0.2.tgz", + "integrity": "sha512-F5DIZ36YVLE+PN+Zwws4kJogq47hNgX3Nx6WyDJ3kcplxyke3XIzB8uK5n/Lpm1HBsbGzd6nmGehL8cPekP+Tg==" + }, "@types/validator": { "version": "13.7.1", "resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.7.1.tgz", @@ -5375,8 +5401,7 @@ "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "dev": true + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, "accepts": { "version": "1.3.8", @@ -5534,8 +5559,7 @@ "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "ansi-styles": { "version": "3.2.1", @@ -5559,8 +5583,7 @@ "aproba": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", - "dev": true + "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, "archive-type": { "version": "4.0.0", @@ -5583,7 +5606,6 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "dev": true, "requires": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" @@ -6104,6 +6126,22 @@ "safe-buffer": "5.1.2" } }, + "bcrypt": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.0.tgz", + "integrity": "sha512-RHBS7HI5N5tEnGTmtR/pppX0mmDSBpQ4aCBsj7CEQfYXDcO74A8sIBYcJMuCsis2E81zDxeENYhv66oZwLiA+Q==", + "requires": { + "@mapbox/node-pre-gyp": "^1.0.10", + "node-addon-api": "^5.0.0" + }, + "dependencies": { + "node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==" + } + } + }, "before-after-hook": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.2.tgz", @@ -6789,6 +6827,78 @@ } } }, + "cloudmersive-virus-api-client": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/cloudmersive-virus-api-client/-/cloudmersive-virus-api-client-1.2.7.tgz", + "integrity": "sha512-t3eHfZtnCZdtgsvwZ0WaGWTgRDsiFdwtId5sK8ppYcM8ntj/pHh7qZAwADQsuaAxdOcpxwMygKBBNJTflylMIQ==", + "requires": { + "superagent": "3.7.0" + }, + "dependencies": { + "debug": { + "version": "3.2.7", + "resolved": 
"https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "requires": { + "ms": "^2.1.1" + } + }, + "form-data": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.1.tgz", + "integrity": "sha512-m21N3WOmEEURgk6B9GLOE4RuWOFf28Lhh9qGYeNlGq4VDXUlJy2th2slBNU8Gp8EzloYZOibZJ7t5ecIrFSjVA==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.6", + "mime-types": "^2.1.12" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "requires": { + "safe-buffer": "~5.1.0" + } + }, + "superagent": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/superagent/-/superagent-3.7.0.tgz", + "integrity": "sha512-/8trxO6NbLx4YXb7IeeFTSmsQ35pQBiTBsLNvobZx7qBzBeHYvKCyIIhW2gNcWbLzYxPAjdgFbiepd8ypwC0Gw==", + "requires": { + "component-emitter": "^1.2.0", + "cookiejar": "^2.1.0", + "debug": "^3.1.0", + "extend": "^3.0.0", + "form-data": "^2.3.1", + "formidable": "^1.1.1", + "methods": "^1.1.1", + "mime": "^1.4.1", + "qs": "^6.5.1", + "readable-stream": "^2.0.5" + } + } + } + }, "co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -6835,8 +6945,7 @@ "color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "dev": true + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" }, "colorette": { "version": "1.2.2", @@ -7101,11 +7210,33 @@ "integrity": "sha512-gNld/3lySHwuhaVluJUKLePYirM3QNCKzVxqAdhJII9/WXKVX5PURzMVJspS1jTslSqjeuG4KMVTSouit5YPHA==", "dev": true }, + "connect-session-sequelize": { + "version": "7.1.5", + "resolved": "https://registry.npmjs.org/connect-session-sequelize/-/connect-session-sequelize-7.1.5.tgz", + "integrity": "sha512-oCHmWlCqhWoq6GGJ9z9PWLk0mrEsIfKlLE4WHUBjRIcSRgUh4OEIDfGY7rfBnlD6clRwwYQfK6+ks0fuTFf6WA==", + "requires": { + "debug": "^4.1.1" + }, + "dependencies": { + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + } + } + }, "console-control-strings": { "version": "1.1.0", 
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", - "dev": true + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=" }, "content-disposition": { "version": "0.5.4", @@ -7692,8 +7823,7 @@ "delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", - "dev": true + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=" }, "depd": { "version": "1.1.2", @@ -7796,11 +7926,6 @@ } } }, - "dompurify": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.3.1.tgz", - "integrity": "sha512-xGWt+NHAQS+4tpgbOAI08yxW0Pr256Gu/FNE2frZVTbgrBUn8M7tz7/ktS/LZ2MHeGqz6topj0/xY+y8R5FBFw==" - }, "dotenv": { "version": "16.0.3", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.0.3.tgz", @@ -7858,8 +7983,7 @@ "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "dev": true + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "enabled": { "version": "2.0.0", @@ -8841,6 +8965,48 @@ "sort-keys-length": "^1.0.0" } }, + "express-rate-limit": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-6.7.0.tgz", + "integrity": "sha512-vhwIdRoqcYB/72TK3tRZI+0ttS8Ytrk24GfmsxDXK9o9IhHNO5bXRiXQSExPQ4GbaE5tvIS7j1SGrxsuWs+sGA==" + }, + "express-session": { + "version": "1.17.3", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.17.3.tgz", + "integrity": "sha512-4+otWXlShYlG1Ma+2Jnn+xgKUZTMJ5QD3YvfilX3AcocOAbIkVylSWEklzALe/+Pu4qV6TYBj5GwOBFfdKqLBw==", + "requires": { + "cookie": "0.4.2", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.0.2", + "parseurl": "~1.3.3", + "safe-buffer": "5.2.1", + "uid-safe": "~2.1.5" + }, + "dependencies": { + "cookie": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.2.tgz", + "integrity": "sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA==" + }, + "depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" + } + } + }, + "extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, "external-editor": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", @@ -9450,7 +9616,6 @@ "version": "3.0.2", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "dev": true, "requires": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", @@ -9466,20 +9631,17 @@ "ansi-regex": { "version": "5.0.1", "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "dev": true + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -9490,7 +9652,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -9744,8 +9905,7 @@ "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", - "dev": true + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=" }, "helmet": { "version": "4.6.0", @@ -10201,8 +10361,7 @@ "is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "is-generator-fn": { "version": "2.1.0", @@ -10588,6 +10747,288 @@ "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", "dev": true }, + "isomorphic-dompurify": { + "version": "0.24.0", + "resolved": "https://registry.npmjs.org/isomorphic-dompurify/-/isomorphic-dompurify-0.24.0.tgz", + "integrity": "sha512-YePhHHQAVsU1CYkL3gKQmga+fTAh66eWg+RVQOVFRNfzoLkd+gFhFY5S+g80f8b0v2JBMYg+npqdZI1vOxTOBQ==", + "requires": { + "@types/dompurify": "^2.3.4", + "dompurify": "^2.4.1", + "jsdom": "^20.0.1" + }, + "dependencies": { + "@tootallnate/once": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz", + "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==" + }, + "abab": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/abab/-/abab-2.0.6.tgz", + "integrity": "sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==" + }, + "acorn": { + "version": "8.8.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.1.tgz", + "integrity": "sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA==" + }, + "acorn-globals": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/acorn-globals/-/acorn-globals-7.0.1.tgz", + "integrity": "sha512-umOSDSDrfHbTNPuNpC2NSnnA3LUrqpevPb4T9jRx4MagXNS0rs+gwiTcAvqCRmsD6utzsrzNt+ebm00SNWiC3Q==", + "requires": { + "acorn": "^8.1.0", + "acorn-walk": "^8.0.2" + } + }, + "acorn-walk": { + 
"version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==" + }, + "cssom": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/cssom/-/cssom-0.5.0.tgz", + "integrity": "sha512-iKuQcq+NdHqlAcwUY0o/HL69XQrUaQdMjmStJ8JFmUaiiQErlhrmuigkg/CU4E2J0IyUKUrMAgl36TvN67MqTw==" + }, + "data-urls": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-3.0.2.tgz", + "integrity": "sha512-Jy/tj3ldjZJo63sVAvg6LHt2mHvl4V6AgRAmNDtLdm7faqtsx+aJG42rsyCo9JCoRVKwPFzKlIPx3DIibwSIaQ==", + "requires": { + "abab": "^2.0.6", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0" + } + }, + "debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "requires": { + "ms": "2.1.2" + } + }, + "decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==" + }, + "domexception": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/domexception/-/domexception-4.0.0.tgz", + "integrity": "sha512-A2is4PLG+eeSfoTMA95/s4pvAoSo2mKtiM5jlHkAVewmiO8ISFTFKZjH7UAM1Atli/OT/7JHOrJRJiMKUZKYBw==", + "requires": { + "webidl-conversions": "^7.0.0" + } + }, + "dompurify": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.1.tgz", + "integrity": "sha512-ewwFzHzrrneRjxzmK6oVz/rZn9VWspGFRDb4/rRtIsM1n36t9AKma/ye8syCpcw+XJ25kOK/hOG7t1j2I2yBqA==" + }, + "entities": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.4.0.tgz", + "integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA==" + }, + "escodegen": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-2.0.0.tgz", + "integrity": "sha512-mmHKys/C8BFUGI+MAWNcSYoORYLMdPzjrknd2Vc+bUsjN5bXcr8EhrNB+UTqfL1y3I9c4fw2ihgtMPQLBRiQxw==", + "requires": { + "esprima": "^4.0.1", + "estraverse": "^5.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1", + "source-map": "~0.6.1" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==" + }, + "form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "requires": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + } + }, + "html-encoding-sniffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", + "integrity": "sha512-oWv4T4yJ52iKrufjnyZPkrN0CH3QnrUqdB6In1g5Fe1mia8GmF36gnfNySxoZtxD5+NmYw1EElVXiBk93UeskA==", + "requires": { + "whatwg-encoding": "^2.0.0" + } + }, + "http-proxy-agent": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz", + "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==", + "requires": 
{ + "@tootallnate/once": "2", + "agent-base": "6", + "debug": "4" + } + }, + "https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "requires": { + "agent-base": "6", + "debug": "4" + } + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + }, + "jsdom": { + "version": "20.0.3", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-20.0.3.tgz", + "integrity": "sha512-SYhBvTh89tTfCD/CRdSOm13mOBa42iTaTyfyEWBdKcGdPxPtLFBXuHR8XHb33YNYaP+lLbmSvBTsnoesCNJEsQ==", + "requires": { + "abab": "^2.0.6", + "acorn": "^8.8.1", + "acorn-globals": "^7.0.0", + "cssom": "^0.5.0", + "cssstyle": "^2.3.0", + "data-urls": "^3.0.2", + "decimal.js": "^10.4.2", + "domexception": "^4.0.0", + "escodegen": "^2.0.0", + "form-data": "^4.0.0", + "html-encoding-sniffer": "^3.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.1", + "is-potential-custom-element-name": "^1.0.1", + "nwsapi": "^2.2.2", + "parse5": "^7.1.1", + "saxes": "^6.0.0", + "symbol-tree": "^3.2.4", + "tough-cookie": "^4.1.2", + "w3c-xmlserializer": "^4.0.0", + "webidl-conversions": "^7.0.0", + "whatwg-encoding": "^2.0.0", + "whatwg-mimetype": "^3.0.0", + "whatwg-url": "^11.0.0", + "ws": "^8.11.0", + "xml-name-validator": "^4.0.0" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "nwsapi": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.2.tgz", + "integrity": "sha512-90yv+6538zuvUMnN+zCr8LuV6bPFdq50304114vJYJ8RDyK8D5O9Phpbd6SZWgI7PwzmmfN1upeOJlvybDSgCw==" + }, + "parse5": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.1.2.tgz", + "integrity": "sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==", + "requires": { + "entities": "^4.4.0" + } + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" + }, + "saxes": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz", + "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==", + "requires": { + "xmlchars": "^2.2.0" + } + }, + "tough-cookie": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.2.tgz", + "integrity": "sha512-G9fqXWoYFZgTc2z8Q5zaHy/vJMjm+WV0AkAeHxVCQiEB1b+dGvWzFW6QV07cY5jQ5gRkeid2qIkzkxUnmoQZUQ==", + "requires": { + "psl": "^1.1.33", + "punycode": "^2.1.1", + "universalify": "^0.2.0", + "url-parse": "^1.5.3" + } + }, + "tr46": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-3.0.0.tgz", + "integrity": "sha512-l7FvfAHlcmulp8kr+flpQZmVwtu7nfRV7NZujtN0OqES8EL4O4e0qqzL0DC5gAvx/ZC/9lk6rhcUwYvkBnBnYA==", + "requires": { + "punycode": "^2.1.1" + } + }, + "universalify": { + "version": "0.2.0", + "resolved": 
"https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz", + "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==" + }, + "w3c-xmlserializer": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-4.0.0.tgz", + "integrity": "sha512-d+BFHzbiCx6zGfz0HyQ6Rg69w9k19nviJspaj4yNscGjrHu94sVP+aRm75yEbCh+r2/yR+7q6hux9LVtbuTGBw==", + "requires": { + "xml-name-validator": "^4.0.0" + } + }, + "webidl-conversions": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz", + "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==" + }, + "whatwg-encoding": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-2.0.0.tgz", + "integrity": "sha512-p41ogyeMUrw3jWclHWTQg1k05DSVXPLcVxRTYsXUk+ZooOCZLcoYgPZ/HL/D/N+uQPOtcp1me1WhBEaX02mhWg==", + "requires": { + "iconv-lite": "0.6.3" + } + }, + "whatwg-mimetype": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-3.0.0.tgz", + "integrity": "sha512-nt+N2dzIutVRxARx1nghPKGv1xHikU7HKdfafKkLNLindmPU/ch3U31NOCGGA/dmPcmb1VlofO0vnKAcsm0o/Q==" + }, + "whatwg-url": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-11.0.0.tgz", + "integrity": "sha512-RKT8HExMpoYx4igMiVMY83lN6UeITKJlBQ+vR/8ZJ8OCdSiN3RwCq+9gH0+Xzj0+5IrM6i4j/6LuvzbZIQgEcQ==", + "requires": { + "tr46": "^3.0.0", + "webidl-conversions": "^7.0.0" + } + }, + "ws": { + "version": "8.11.0", + "resolved": "https://registry.npmjs.org/ws/-/ws-8.11.0.tgz", + "integrity": "sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg==" + }, + "xml-name-validator": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-4.0.0.tgz", + "integrity": "sha512-ICP2e+jsHvAj2E2lIHxa5tjXRlKDJo4IdvPvCXbXQGdzSfmSpNVyIKMvoZHjDY9DP0zV17iI85o90vRFXNccRw==" + } + } + }, "isomorphic-git": { "version": "1.18.3", "resolved": "https://registry.npmjs.org/isomorphic-git/-/isomorphic-git-1.18.3.tgz", @@ -12909,7 +13350,6 @@ "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "dev": true, "requires": { "semver": "^6.0.0" }, @@ -12917,8 +13357,7 @@ "semver": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", - "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", - "dev": true + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" } } }, @@ -13327,6 +13766,11 @@ "integrity": "sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==", "dev": true }, + "nocache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/nocache/-/nocache-3.0.4.tgz", + "integrity": "sha512-WDD0bdg9mbq6F4mRxEYcPWwfA1vxd0mrvKOyxI7Xj/atfRHVeutzuWByG//jfm4uPzp0y4Kj051EORCBSQMycw==" + }, "node-addon-api": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", @@ -13518,7 +13962,6 @@ "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": 
"sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "dev": true, "requires": { "abbrev": "1" } @@ -13548,7 +13991,6 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "dev": true, "requires": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", @@ -14464,6 +14906,11 @@ "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" }, + "querystringify": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz", + "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==" + }, "queue-microtask": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", @@ -14476,6 +14923,11 @@ "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", "dev": true }, + "random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==" + }, "range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -14588,6 +15040,11 @@ "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", "dev": true }, + "requires-port": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz", + "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==" + }, "resolve": { "version": "1.20.0", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", @@ -15057,8 +15514,7 @@ "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", - "dev": true + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" }, "setprototypeof": { "version": "1.2.0", @@ -15109,7 +15565,6 @@ "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true }, "simple-concat": { "version": "1.0.1", @@ -15537,7 +15992,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "dev": true, "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", @@ -15594,7 +16048,6 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "dev": true, "requires": { "ansi-regex": "^5.0.1" } @@ -16373,6 +16826,14 @@ "dev": true, "optional": true }, + "uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "requires": { + "random-bytes": "~1.0.0" + } + }, 
"umzug": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/umzug/-/umzug-3.0.0.tgz", @@ -16517,6 +16978,15 @@ "querystring": "0.2.0" } }, + "url-parse": { + "version": "1.5.10", + "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz", + "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==", + "requires": { + "querystringify": "^2.1.1", + "requires-port": "^1.0.0" + } + }, "util": { "version": "0.12.4", "resolved": "https://registry.npmjs.org/util/-/util-0.12.4.tgz", @@ -16869,7 +17339,6 @@ "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "dev": true, "requires": { "string-width": "^1.0.2 || 2 || 3 || 4" } diff --git a/package.json b/package.json index d7b5a1507..801659c6d 100644 --- a/package.json +++ b/package.json @@ -34,21 +34,26 @@ "aws-sdk": "^2.946.0", "axios": "^0.21.3", "base-64": "^0.1.0", + "bcrypt": "^5.1.0", "bluebird": "^3.7.2", "body-parser": "^1.19.2", + "cloudmersive-virus-api-client": "^1.2.7", + "connect-session-sequelize": "^7.1.5", "cookie-parser": "~1.4.5", "cors": "^2.8.5", "crypto-js": "^4.1.1", "dd-trace": "^2.9.1", "debug": "~2.6.9", - "dompurify": "^2.3.1", "dotenv": "^16.0.1", "exponential-backoff": "^3.1.0", "express": "~4.17.3", + "express-session": "^1.17.3", + "express-rate-limit": "^6.7.0", "file-type": "^16.5.4", "helmet": "^4.6.0", "http-errors": "~1.8.0", "is-svg": "^4.3.1", + "isomorphic-dompurify": "^0.24.0", "isomorphic-git": "^1.18.2", "joi": "^17.4.0", "js-base64": "^2.6.4", @@ -59,6 +64,7 @@ "moment-timezone": "^0.5.35", "morgan": "~1.10.0", "neverthrow": "^4.3.1", + "nocache": "^3.0.4", "otplib": "^12.0.1", "pg": "^8.6.0", "pg-connection-string": "^2.5.0", @@ -86,8 +92,10 @@ "@swc/helpers": "^0.3.8", "@tsconfig/recommended": "^1.0.1", "@types/aws-lambda": "^8.10.106", + "@types/bcrypt": "^5.0.0", "@types/cookie-parser": "^1.4.3", "@types/express": "^4.17.13", + "@types/express-session": "^1.17.5", "@types/jest": "^27.4.1", "@types/lodash": "^4.14.186", "@types/node": "^17.0.21", diff --git a/src/classes/Collection.js b/src/classes/Collection.js index 1999f4355..66ea97b43 100644 --- a/src/classes/Collection.js +++ b/src/classes/Collection.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") require("bluebird") require("lodash") @@ -69,20 +72,22 @@ class Collection { } if (ISOMER_TEMPLATE_PROTECTED_DIRS.includes(collectionName)) throw new ConflictError(protectedFolderConflictErrorMsg(collectionName)) - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) await collectionConfig.create(newContent) const nav = new File(this.accessToken, this.siteName) const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) navContentObject.links.push({ title: deslugifyCollectionName(collectionName), collection: collectionName, }) - const newNavContent = Base64.encode(yaml.stringify(navContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(navContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } @@ 
-118,7 +123,7 @@ class Collection { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.filter( (link) => link.collection !== collectionName @@ -127,7 +132,9 @@ class Collection { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } @@ -191,7 +198,7 @@ class Collection { }, } const newConfigContent = Base64.encode( - yaml.stringify(newConfigContentObject) + sanitizedYamlStringify(newConfigContentObject) ) await collectionConfig.update(newConfigContent, configSha) @@ -200,7 +207,7 @@ class Collection { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.map((link) => { if (link.collection === oldCollectionName) { @@ -217,7 +224,9 @@ class Collection { links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) } } diff --git a/src/classes/Config.js b/src/classes/Config.js index b1d7de383..1ea2668c4 100644 --- a/src/classes/Config.js +++ b/src/classes/Config.js @@ -1,5 +1,4 @@ const _ = require("lodash") -const yaml = require("yaml") const { ConflictError, @@ -8,6 +7,10 @@ const { const { NotFoundError } = require("@errors/NotFoundError") const validateStatus = require("@utils/axios-utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const { genericGitHubAxiosInstance: axios, @@ -121,7 +124,7 @@ class CollectionConfig extends Config { async read() { const { content, sha } = await super.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) return { content: contentObject, sha } } @@ -144,7 +147,7 @@ class CollectionConfig extends Config { } } content.collections[collectionName].order.splice(newIndex, 0, item) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await this.update(newContent, sha) } @@ -154,7 +157,7 @@ class CollectionConfig extends Config { const { content, sha } = await this.read() const index = content.collections[collectionName].order.indexOf(item) content.collections[collectionName].order.splice(index, 1) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await this.update(newContent, sha) return { index, item } @@ -166,7 +169,7 @@ class CollectionConfig extends Config { const index = content.collections[collectionName].order.indexOf(oldItem) content.collections[collectionName].order.splice(index, 1) content.collections[collectionName].order.splice(index, 0, newItem) - const newContent = Base64.encode(yaml.stringify(content)) + const newContent = Base64.encode(sanitizedYamlStringify(content)) await 
this.update(newContent, sha) } @@ -179,7 +182,7 @@ class CollectionConfig extends Config { ) const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = filteredOrder - const newContent = Base64.encode(yaml.stringify(newContentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(newContentObject)) await this.update(newContent, sha) } @@ -196,7 +199,7 @@ class CollectionConfig extends Config { ) const newContentObject = _.cloneDeep(content) newContentObject.collections[collectionName].order = renamedOrder - const newContent = Base64.encode(yaml.stringify(newContentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(newContentObject)) await this.update(newContent, sha) } diff --git a/src/classes/Resource.js b/src/classes/Resource.js index 23e4e73e9..d10f846f1 100644 --- a/src/classes/Resource.js +++ b/src/classes/Resource.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const _ = require("lodash") -const yaml = require("yaml") // Import classes const { NotFoundError } = require("@errors/NotFoundError") @@ -18,6 +17,10 @@ const { sendTree, deslugifyCollectionName, } = require("@utils/utils.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") // Constants const RESOURCE_INDEX_PATH = "index.html" @@ -47,7 +50,7 @@ class Resource { layout: "resources-alt", title: deslugifyCollectionName(resourceName), } - const resourceFrontMatter = yaml.stringify(resourceObject) + const resourceFrontMatter = sanitizedYamlStringify(resourceObject) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") return IsomerFile.create( `${RESOURCE_INDEX_PATH}`, @@ -108,9 +111,11 @@ class Resource { IsomerFile.setFileType(resourceType) const { content, sha } = await IsomerFile.read(RESOURCE_INDEX_PATH) const decodedContent = Base64.decode(content) - const resourceFrontMatterObj = yaml.parse(decodedContent.split("---")[1]) + const resourceFrontMatterObj = sanitizedYamlParse( + decodedContent.split("---")[1] + ) resourceFrontMatterObj.title = deslugifyCollectionName(newResourceName) - const resourceFrontMatter = yaml.stringify(resourceFrontMatterObj) + const resourceFrontMatter = sanitizedYamlStringify(resourceFrontMatterObj) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") await IsomerFile.update( RESOURCE_INDEX_PATH, diff --git a/src/classes/ResourceRoom.js b/src/classes/ResourceRoom.js index 5eab979d2..95d5d688c 100644 --- a/src/classes/ResourceRoom.js +++ b/src/classes/ResourceRoom.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const _ = require("lodash") -const yaml = require("yaml") // Import Classes const { Config } = require("@classes/Config.js") @@ -13,6 +12,10 @@ const { sendTree, deslugifyCollectionName, } = require("@utils/utils.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") // Constants const RESOURCE_ROOM_INDEX_PATH = "index.html" @@ -27,7 +30,7 @@ class ResourceRoom { async get() { const config = new Config(this.accessToken, this.siteName) const { content } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) return contentObject.resources_name } @@ -35,11 +38,11 @@ class ResourceRoom { async create(resourceRoom) { const config = new Config(this.accessToken, this.siteName) const { content, sha } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject 
= sanitizedYamlParse(Base64.decode(content)) contentObject.resources_name = resourceRoom - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) // Create index file in resourceRoom const IsomerIndexFile = new File(this.accessToken, this.siteName) @@ -49,7 +52,7 @@ class ResourceRoom { layout: "resources", title: deslugifyCollectionName(resourceRoom), } - const resourceRoomFrontMatter = yaml.stringify(resourceRoomObject) + const resourceRoomFrontMatter = sanitizedYamlStringify(resourceRoomObject) const resourceRoomIndexContent = [ "---\n", resourceRoomFrontMatter, @@ -66,13 +69,15 @@ class ResourceRoom { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) navContentObject.links.push({ title: deslugifyCollectionName(resourceRoom), resource_room: true, }) - const newNavContent = Base64.encode(yaml.stringify(navContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(navContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) @@ -83,12 +88,12 @@ class ResourceRoom { // Add resource room to config const config = new Config(this.accessToken, this.siteName) const { content: configContent, sha: configSha } = await config.read() - const contentObject = yaml.parse(Base64.decode(configContent)) + const contentObject = sanitizedYamlParse(Base64.decode(configContent)) // Obtain existing resourceRoomName const resourceRoomName = contentObject.resources_name contentObject.resources_name = newResourceRoom - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) const commitMessage = `Rename resource room from ${resourceRoomName} to ${newResourceRoom}` @@ -99,7 +104,7 @@ class ResourceRoom { const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) const newNavLinks = navContentObject.links.map((link) => { if (link.resource_room === true) { @@ -114,7 +119,9 @@ class ResourceRoom { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) const { currentCommitSha, treeSha } = await getCommitAndTreeSha( @@ -164,9 +171,11 @@ class ResourceRoom { sha: resourceFileSha, } = await IsomerFile.read(RESOURCE_ROOM_INDEX_PATH) const decodedContent = Base64.decode(resourceFileContent) - const resourceFrontMatterObj = yaml.parse(decodedContent.split("---")[1]) + const resourceFrontMatterObj = sanitizedYamlParse( + decodedContent.split("---")[1] + ) resourceFrontMatterObj.title = deslugifyCollectionName(newResourceRoom) - const resourceFrontMatter = yaml.stringify(resourceFrontMatterObj) + const resourceFrontMatter = sanitizedYamlStringify(resourceFrontMatterObj) const resourceIndexContent = ["---\n", resourceFrontMatter, "---"].join("") await IsomerFile.update( RESOURCE_ROOM_INDEX_PATH, @@ -181,21 +190,21 @@ class ResourceRoom { // Delete resource in config const config = new 
Config(this.accessToken, this.siteName) const { content, sha } = await config.read() - const contentObject = yaml.parse(Base64.decode(content)) + const contentObject = sanitizedYamlParse(Base64.decode(content)) // Obtain resourceRoomName const resourceRoomName = contentObject.resources_name // Delete resourcses_name from Config delete contentObject.resources_name - const newContent = Base64.encode(yaml.stringify(contentObject)) + const newContent = Base64.encode(sanitizedYamlStringify(contentObject)) // Delete resource room in nav if it exists const nav = new File(this.accessToken, this.siteName) const dataType = new DataType() nav.setFileType(dataType) const { content: navContent, sha: navSha } = await nav.read(NAV_FILE_NAME) - const navContentObject = yaml.parse(Base64.decode(navContent)) + const navContentObject = sanitizedYamlParse(Base64.decode(navContent)) // Assumption: only a single resource room exists const newNavLinks = navContentObject.links.filter( @@ -205,7 +214,9 @@ class ResourceRoom { ...navContentObject, links: newNavLinks, } - const newNavContent = Base64.encode(yaml.stringify(newNavContentObject)) + const newNavContent = Base64.encode( + sanitizedYamlStringify(newNavContentObject) + ) await nav.update(NAV_FILE_NAME, newNavContent, navSha) // Delete all resources and resourcePages diff --git a/src/classes/Settings.js b/src/classes/Settings.js index bee68f202..33de19ae3 100644 --- a/src/classes/Settings.js +++ b/src/classes/Settings.js @@ -1,12 +1,16 @@ const Bluebird = require("bluebird") const { Base64 } = require("js-base64") const _ = require("lodash") -const yaml = require("yaml") // import classes const { Config } = require("@classes/Config.js") const { File, DataType, HomepageType } = require("@classes/File.js") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") + // Constants const FOOTER_PATH = "footer.yml" const NAVIGATION_PATH = "navigation.yml" @@ -49,7 +53,7 @@ const retrieveSettingsFiles = async ( // homepage requires special extraction as the content is wrapped in front matter if (fileOpKey === "homepage") { const homepageContent = Base64.decode(content) - const homepageFrontMatterObj = yaml.parse( + const homepageFrontMatterObj = sanitizedYamlParse( homepageContent.split("---")[1] ) return { type: fileOpKey, content: homepageFrontMatterObj, sha } @@ -57,7 +61,7 @@ const retrieveSettingsFiles = async ( return { type: fileOpKey, - content: yaml.parse(Base64.decode(content)), + content: sanitizedYamlParse(Base64.decode(content)), sha, } } @@ -209,7 +213,9 @@ class Settings { // To-do: use Git Tree to speed up operations if (!_.isEmpty(configSettings)) { - const newConfigContent = Base64.encode(yaml.stringify(configSettingsObj)) + const newConfigContent = Base64.encode( + sanitizedYamlStringify(configSettingsObj) + ) await configResp.update(newConfigContent, config.sha) // Update title and description in homepage as well if it's changed @@ -222,7 +228,7 @@ class Settings { if (hasTitleChanged) homepageContentObj.title = configSettings.title if (hasDescriptionChanged) homepageContentObj.description = configSettings.description - const homepageFrontMatter = yaml.stringify(homepageContentObj) + const homepageFrontMatter = sanitizedYamlStringify(homepageContentObj) const homepageContent = ["---\n", homepageFrontMatter, "---"].join("") const newHomepageContent = Base64.encode(homepageContent) @@ -232,13 +238,15 @@ class Settings { } if (!_.isEmpty(footerSettings)) { - const newFooterContent = 
Base64.encode(yaml.stringify(footerSettingsObj)) + const newFooterContent = Base64.encode( + sanitizedYamlStringify(footerSettingsObj) + ) await FooterFile.update(FOOTER_PATH, newFooterContent, footer.sha) } if (!_.isEmpty(navigationSettings)) { const newNavigationContent = Base64.encode( - yaml.stringify(navigationSettingsObj) + sanitizedYamlStringify(navigationSettingsObj) ) await NavigationFile.update( NAVIGATION_PATH, diff --git a/src/database/migrations/20230125033437-add-sessions.js b/src/database/migrations/20230125033437-add-sessions.js new file mode 100644 index 000000000..8d650ccc0 --- /dev/null +++ b/src/database/migrations/20230125033437-add-sessions.js @@ -0,0 +1,28 @@ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("sessions", { + sid: { + primaryKey: true, + type: Sequelize.STRING(36), + }, + expires: { + type: Sequelize.DATE, + }, + data: { + type: Sequelize.TEXT, + }, + created_at: { + allowNull: false, + type: Sequelize.DATE, + }, + updated_at: { + allowNull: false, + type: Sequelize.DATE, + }, + }) + }, + + async down(queryInterface) { + await queryInterface.dropTable("sessions") + }, +} diff --git a/src/database/migrations/20230214055456-create-otps.js b/src/database/migrations/20230214055456-create-otps.js new file mode 100644 index 000000000..c5f75c587 --- /dev/null +++ b/src/database/migrations/20230214055456-create-otps.js @@ -0,0 +1,49 @@ +/** @type {import('sequelize-cli').Migration} */ +module.exports = { + async up(queryInterface, Sequelize) { + await queryInterface.createTable("otps", { + id: { + allowNull: false, + autoIncrement: true, + primaryKey: true, + type: Sequelize.INTEGER, + }, + email: { + allowNull: true, + unique: true, + type: Sequelize.STRING, + }, + mobile_number: { + allowNull: true, + unique: true, + type: Sequelize.STRING, + }, + hashed_otp: { + allowNull: false, + validate: { + notEmpty: true, + }, + type: Sequelize.STRING, + }, + attempts: { + allowNull: false, + type: Sequelize.INTEGER, + }, + expires_at: { + allowNull: false, + type: Sequelize.DATE, + }, + created_at: { + allowNull: false, + type: Sequelize.DATE, + }, + updated_at: { + allowNull: false, + type: Sequelize.DATE, + }, + }) + }, + async down(queryInterface, Sequelize) { + await queryInterface.dropTable("otps") + }, +} diff --git a/src/database/models/Otp.ts b/src/database/models/Otp.ts new file mode 100644 index 000000000..c9324acec --- /dev/null +++ b/src/database/models/Otp.ts @@ -0,0 +1,61 @@ +import { + Column, + CreatedAt, + DataType, + Model, + Table, + UpdatedAt, +} from "sequelize-typescript" + +@Table({ tableName: "otps" }) +export class Otp extends Model { + @Column({ + autoIncrement: true, + primaryKey: true, + allowNull: false, + type: DataType.BIGINT, + }) + id!: number + + @Column({ + allowNull: true, + unique: true, + type: DataType.TEXT, + }) + email?: string | null + + @Column({ + allowNull: true, + unique: true, + type: DataType.TEXT, + }) + mobileNumber?: string | null + + @Column({ + allowNull: false, + type: DataType.TEXT, + validate: { + notEmpty: true, + }, + }) + hashedOtp!: string + + // tracks number of times user attempts to submit the OTP code and log in + @Column({ + type: DataType.INTEGER, + defaultValue: 0, + }) + attempts!: number + + @Column({ + allowNull: false, + type: DataType.DATE, + }) + expiresAt!: Date + + @CreatedAt + createdAt!: Date + + @UpdatedAt + updatedAt!: Date +} diff --git a/src/database/models/index.ts b/src/database/models/index.ts index 89825da63..c6df83264 100644 --- 
a/src/database/models/index.ts +++ b/src/database/models/index.ts @@ -3,6 +3,7 @@ export * from "@database/models/SiteMember" export * from "@database/models/User" export * from "@database/models/Whitelist" export * from "@database/models/AccessToken" +export * from "@database/models/Otp" export * from "@database/models/Repo" export * from "@database/models/Deployment" export * from "@database/models/Launch" diff --git a/src/fixtures/identity.ts b/src/fixtures/identity.ts index 0e20ed44c..69bc15588 100644 --- a/src/fixtures/identity.ts +++ b/src/fixtures/identity.ts @@ -38,7 +38,6 @@ export const mockCollaboratorContributor1: Attributes & { id: 1, email: MOCK_IDENTITY_EMAIL_ONE, githubId: "test1", - contactNumber: "12331231", lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), createdAt: new Date("2022-04-04T07:25:41.013Z"), updatedAt: new Date("2022-07-30T07:41:09.662Z"), @@ -59,7 +58,6 @@ export const mockCollaboratorAdmin1: Attributes & { id: 2, email: MOCK_IDENTITY_EMAIL_TWO, githubId: "test2", - contactNumber: "12331232", lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), createdAt: new Date("2022-04-04T07:25:41.013Z"), updatedAt: new Date("2022-07-30T07:41:09.662Z"), @@ -79,7 +77,6 @@ export const mockCollaboratorAdmin2: Attributes & { id: 3, email: MOCK_IDENTITY_EMAIL_THREE, githubId: "test3", - contactNumber: "12331233", lastLoggedIn: new Date("2022-06-30T07:41:09.661Z"), createdAt: new Date("2022-04-04T07:25:41.013Z"), updatedAt: new Date("2022-07-30T07:41:09.662Z"), @@ -99,7 +96,6 @@ export const mockCollaboratorContributor2: Attributes & { id: 4, email: MOCK_IDENTITY_EMAIL_FOUR, githubId: "test4", - contactNumber: "12331234", lastLoggedIn: new Date("2022-07-30T07:41:09.661Z"), createdAt: new Date("2022-04-04T07:25:41.013Z"), updatedAt: new Date("2022-07-30T07:41:09.662Z"), diff --git a/src/fixtures/markdown-fixtures.ts b/src/fixtures/markdown-fixtures.ts new file mode 100644 index 000000000..ca793a7b3 --- /dev/null +++ b/src/fixtures/markdown-fixtures.ts @@ -0,0 +1,48 @@ +const normalFrontMatter = `layout: simple-page +title: Digital Transformation +permalink: /digital-transformation/ +breadcrumb: Digital Transformation` + +const maliciousFrontMatter = `layout: simple-page +title: Digital Transformation +permalink: /digital-transformation/ +breadcrumb: Digital Transformation` + +const normalPageContent = `### Test header +### **Subheader** +Content +![Image](/path/to-image.jpg)` + +const maliciousPageContent = `### Test header +### **Subheader** +Content +![Image](/path/to-image.jpg)` + +export const normalMarkdownContent = `--- +${normalFrontMatter} +--- +${normalPageContent}` + +export const maliciousMarkdownContent = `--- +${maliciousFrontMatter} +--- +${maliciousPageContent}` + +export const normalJsonObject = { + frontMatter: { + layout: "simple-page", + title: "Digital Transformation", + permalink: "/digital-transformation/", + breadcrumb: "Digital Transformation", + }, + pageContent: normalPageContent, +} +export const maliciousJsonObject = { + frontMatter: { + layout: "simple-page", + title: "Digital Transformation", + permalink: "/digital-transformation/", + breadcrumb: "Digital Transformation", + }, + pageContent: maliciousPageContent, +} diff --git a/src/fixtures/yaml-fixtures.ts b/src/fixtures/yaml-fixtures.ts new file mode 100644 index 000000000..ac8c5cce5 --- /dev/null +++ b/src/fixtures/yaml-fixtures.ts @@ -0,0 +1,65 @@ +export const normalYamlString = `logo: /path-to/logo.png +links: + - title: TitleA + url: /title-a + - title: TitleB + url: /title-b + - title: 
TitleC + url: /title-c + sublinks: + - title: sublink-1 + url: /sublink-1 + - title: sublink-2 + url: /sublink-2 +` +export const maliciousYamlString = `logo: /path-to/logo.png +links: + - title: TitleA + url: /title-a + - title: TitleB + url: /title-b + - title: TitleC + url: /title-c + sublinks: + - title: sublink-1 + url: /sublink-1 + - title: sublink-2 + url: /sublink-2 +` + +export const normalYamlObject = { + logo: "/path-to/logo.png", + links: [ + { title: "TitleA", url: "/title-a" }, + { title: "TitleB", url: "/title-b" }, + { + title: "TitleC", + url: "/title-c", + sublinks: [ + { title: "sublink-1", url: "/sublink-1" }, + { title: "sublink-2", url: "/sublink-2" }, + ], + }, + ], +} +export const maliciousYamlObject = { + logo: "/path-to/logo.png", + links: [ + { title: "TitleA", url: "/title-a" }, + { title: "TitleB", url: "/title-b" }, + { + title: "TitleC", + url: "/title-c", + sublinks: [ + { + title: "sublink-1", + url: "/sublink-1", + }, + { + title: "sublink-2", + url: "/sublink-2", + }, + ], + }, + ], +} diff --git a/src/integration/Users.spec.ts b/src/integration/Users.spec.ts index 0d5a4e630..e942e66c4 100644 --- a/src/integration/Users.spec.ts +++ b/src/integration/Users.spec.ts @@ -2,7 +2,7 @@ import express from "express" import mockAxios from "jest-mock-axios" import request from "supertest" -import { User, Whitelist } from "@database/models" +import { User, Whitelist, Otp } from "@database/models" import { generateRouter } from "@fixtures/app" import UserSessionData from "@root/classes/UserSessionData" import { mockIsomerUserId } from "@root/fixtures/sessionData" @@ -22,6 +22,9 @@ const mockWhitelistedDomain = ".gov.sg" const mockGithubId = "i m a git" const mockValidNumber = "92341234" const mockInvalidNumber = "00000000" +const maxNumOfOtpAttempts = + parseInt(process.env.MAX_NUM_OTP_ATTEMPTS || "", 10) ?? 
5 +const mockInvalidOtp = "000000" const UsersService = getUsersService(sequelize) @@ -156,6 +159,9 @@ describe("Users Router", () => { await Whitelist.destroy({ where: { email: mockWhitelistedDomain }, }) + await Otp.destroy({ + where: { email: mockValidEmail }, + }) }) it("should return 200 when the otp is correct", async () => { @@ -166,8 +172,10 @@ describe("Users Router", () => { otp = extractEmailOtp(email.body) return email }) + await User.create({ id: mockIsomerUserId }) await Whitelist.create({ email: mockWhitelistedDomain }) + await request(app).post("/email/otp").send({ email: mockValidEmail, }) @@ -192,8 +200,9 @@ describe("Users Router", () => { it("should return 400 when the otp is wrong", async () => { // Arrange const expected = 400 - const wrongOtp = 123456 + const wrongOtp = "123456" mockAxios.post.mockResolvedValueOnce(200) + await Whitelist.create({ email: mockWhitelistedDomain }) await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, @@ -214,6 +223,7 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) + await Whitelist.create({ email: mockWhitelistedDomain }) await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, @@ -234,6 +244,7 @@ describe("Users Router", () => { // Arrange const expected = 400 mockAxios.post.mockResolvedValueOnce(200) + await Whitelist.create({ email: mockWhitelistedDomain }) await User.create({ id: mockIsomerUserId }) await request(app).post("/email/otp").send({ email: mockValidEmail, @@ -249,6 +260,121 @@ describe("Users Router", () => { // Assert expect(actual.statusCode).toBe(expected) }) + + it("should only ensure the latest email otp is valid", async () => { + // Arrange + const expected = 200 + let otp + mockAxios.post.mockImplementation((_: any, email: any) => { + otp = extractEmailOtp(email.body) + return email + }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + // Act + const actual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp, + userId: mockIsomerUserId, + }) + const oldOtp = otp + + // Assert + expect(actual.statusCode).toBe(expected) + + // Arrange + const newExpected = 400 + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const newActual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: oldOtp, + userId: mockIsomerUserId, + }) + + // Assert + expect(oldOtp).not.toBe(otp) + expect(newActual.statusCode).toBe(newExpected) + }) + + it("should return 400 when max number of email otp attempts is reached with correct error message", async () => { + // Arrange + const expected = 400 + mockAxios.post.mockResolvedValue(200) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + const actual = await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + const otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(actual.statusCode).toBe(expected) + + if (i <= maxNumOfOtpAttempts) { + 
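Aside on the `maxNumOfOtpAttempts` constant introduced above (and the matching `PARSED_EXPIRY` / `PARSED_MAX_NUM_OTP_ATTEMPTS` constants in `UsersService.ts` further down): `parseInt` never returns `null` or `undefined`, so when the env var is unset it yields `NaN` and the `??` fallback never fires. The tests still pass because `.env.test` defines both variables, but a sketch of a parse helper that genuinely falls back on `NaN` is shown below; the helper name `parseIntWithDefault` is illustrative and not part of this PR.

```ts
// Minimal sketch (not part of the PR): parse an integer env var with a real fallback.
// `parseInt("", 10)` evaluates to NaN, and `NaN ?? fallback` stays NaN because `??`
// only substitutes on null/undefined, so NaN has to be checked explicitly.
const parseIntWithDefault = (raw: string | undefined, fallback: number): number => {
  const parsed = parseInt(raw ?? "", 10)
  return Number.isNaN(parsed) ? fallback : parsed
}

// Example usage with the same env vars the tests and UsersService read:
const maxNumOfOtpAttempts = parseIntWithDefault(process.env.MAX_NUM_OTP_ATTEMPTS, 5)
const otpExpiryMs = parseIntWithDefault(process.env.OTP_EXPIRY, 15 * 60 * 1000)
```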
expect(otpEntry?.attempts).toBe(i) + expect(actual.body.error.message).toBe("OTP is not valid") + } else { + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + expect(actual.body.error.message).toBe( + "Max number of attempts reached" + ) + } + } + }) + + it("should reset otp attempts when new email otp is requested", async () => { + // Arrange + mockAxios.post.mockResolvedValue(200) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + await request(app).post("/email/verifyOtp").send({ + email: mockValidEmail, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + } + + let otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + + // Request for new otp and ensure attempts are reset + await request(app).post("/email/otp").send({ + email: mockValidEmail, + }) + otpEntry = await Otp.findOne({ + where: { email: mockValidEmail }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(0) + }) }) describe("/mobile/otp", () => { @@ -345,7 +471,7 @@ describe("Users Router", () => { it("should return 400 when the otp is wrong", async () => { // Arrange const expected = 400 - const wrongOtp = 123456 + const wrongOtp = "123456" mockAxios.post.mockResolvedValueOnce(200) await User.create({ id: mockIsomerUserId }) await request(app).post("/mobile/otp").send({ @@ -402,5 +528,118 @@ describe("Users Router", () => { // Assert expect(actual.statusCode).toBe(expected) }) + + it("should only ensure the latest mobile otp is valid", async () => { + // Arrange + const expected = 200 + let otp + mockAxios.post.mockImplementation((_: any, sms: any) => { + otp = extractMobileOtp(sms.body) + return sms + }) + await Whitelist.create({ email: mockWhitelistedDomain }) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + // Act + const actual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp, + userId: mockIsomerUserId, + }) + const oldOtp = otp + + // Assert + expect(actual.statusCode).toBe(expected) + + // Arrange + const newExpected = 400 + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const newActual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: oldOtp, + userId: mockIsomerUserId, + }) + + // Assert + expect(oldOtp).not.toBe(otp) + expect(newActual.statusCode).toBe(newExpected) + }) + + it("should return 400 when max number of mobile otp attempts is reached with correct error message", async () => { + // Arrange + const expected = 400 + mockAxios.post.mockResolvedValueOnce(200) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + const actual = await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + const otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(actual.statusCode).toBe(expected) + + if (i <= maxNumOfOtpAttempts) { + expect(otpEntry?.attempts).toBe(i) + 
expect(actual.body.error.message).toBe("OTP is not valid") + } else { + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + expect(actual.body.error.message).toBe( + "Max number of attempts reached" + ) + } + } + }) + + it("should reset otp attempts when new mobile otp is requested", async () => { + // Arrange + mockAxios.post.mockResolvedValue(200) + await User.create({ id: mockIsomerUserId }) + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + + const numOfAttempts = 10 // arbitrary number > maxNumOfAttempts + for (let i = 1; i <= numOfAttempts; i++) { + await request(app).post("/mobile/verifyOtp").send({ + mobile: mockValidNumber, + otp: mockInvalidOtp, + userId: mockIsomerUserId, + }) + } + + let otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(maxNumOfOtpAttempts) + + // Request for new otp and ensure attempts are reset + await request(app).post("/mobile/otp").send({ + mobile: mockValidNumber, + }) + otpEntry = await Otp.findOne({ + where: { mobileNumber: mockValidNumber }, + }) + + // Assert + expect(otpEntry?.attempts).toBe(0) + }) }) }) diff --git a/src/middleware/authentication.ts b/src/middleware/authentication.ts index cfbb36b9a..0d68cdb18 100644 --- a/src/middleware/authentication.ts +++ b/src/middleware/authentication.ts @@ -1,8 +1,14 @@ import autoBind from "auto-bind" import { NextFunction, Request, Response } from "express" +import { Session } from "express-session" import UserSessionData from "@root/classes/UserSessionData" import AuthenticationMiddlewareService from "@root/services/middlewareServices/AuthenticationMiddlewareService" +import { SessionData } from "@root/types/express/session" + +interface RequestWithSession extends Request { + session: Session & SessionData +} export class AuthenticationMiddleware { private readonly authenticationMiddlewareService: AuthenticationMiddlewareService @@ -17,16 +23,17 @@ export class AuthenticationMiddleware { autoBind(this) } - verifyJwt(req: Request, res: Response, next: NextFunction) { - const { cookies, originalUrl: url } = req + verifyAccess(req: RequestWithSession, res: Response, next: NextFunction) { + const { cookies, originalUrl: url, session } = req const { accessToken, githubId, isomerUserId, email, - } = this.authenticationMiddlewareService.verifyJwt({ + } = this.authenticationMiddlewareService.verifyAccess({ cookies, url, + userInfo: session.userInfo, }) const userSessionData = new UserSessionData({ accessToken, diff --git a/src/middleware/notificationOnEditHandler.ts b/src/middleware/notificationOnEditHandler.ts index 1a8a36f99..1eb1b9ef2 100644 --- a/src/middleware/notificationOnEditHandler.ts +++ b/src/middleware/notificationOnEditHandler.ts @@ -47,6 +47,9 @@ export class NotificationOnEditHandler { { userWithSiteSessionData: UserWithSiteSessionData } > = async (req, res, next) => { const { userWithSiteSessionData } = res.locals + + if (!userWithSiteSessionData.isEmailUser()) return + const { siteName, isomerUserId: userId, email } = userWithSiteSessionData const site = await this.sitesService.getBySiteName(siteName) const users = await this.collaboratorsService.list(siteName, userId) diff --git a/src/routes/v1/auth.js b/src/routes/v1/auth.js index 93e370696..09fa06170 100644 --- a/src/routes/v1/auth.js +++ b/src/routes/v1/auth.js @@ -3,6 +3,8 @@ const express = require("express") const queryString = require("query-string") const uuid = require("uuid/v4") +const logger = require("@logger/logger") + // 
Import error const { AuthError } = require("@errors/AuthError") const { ForbiddenError } = require("@errors/ForbiddenError") @@ -111,32 +113,20 @@ async function githubAuth(req, res) { const user = await identityServices.usersService.login(githubId) if (!user) throw Error("Failed to create user") - const authTokenExpiry = new Date() - authTokenExpiry.setTime(authTokenExpiry.getTime() + AUTH_TOKEN_EXPIRY_MS) - - const cookieSettings = { - path: "/", - expires: authTokenExpiry, - httpOnly: true, - sameSite: true, - secure: - process.env.NODE_ENV !== "DEV" && - process.env.NODE_ENV !== "LOCAL_DEV" && - process.env.NODE_ENV !== "test", + const userInfo = { + accessToken: jwtUtils.encryptToken(accessToken), + githubId, + isomerUserId: user.id, } - - const token = jwtUtils.signToken({ - access_token: jwtUtils.encryptToken(accessToken), - user_id: githubId, - isomer_user_id: user.id, - }) - - res.cookie(COOKIE_NAME, token, cookieSettings) + Object.assign(req.session, { userInfo }) + logger.info(`User ${userInfo.email} successfully logged in`) return res.redirect(`${FRONTEND_URL}/sites`) } async function logout(req, res) { clearAllCookies(res) + req.session.destroy() + logger.info(`User ${userInfo.email} successfully logged out`) return res.sendStatus(200) } @@ -173,7 +163,7 @@ router.get("/", attachReadRouteHandlerWrapper(githubAuth)) router.delete("/logout", attachReadRouteHandlerWrapper(logout)) router.get( "/whoami", - authenticationMiddleware.verifyJwt, + authenticationMiddleware.verifyAccess, attachReadRouteHandlerWrapper(whoami) ) diff --git a/src/routes/v1/authenticated/index.js b/src/routes/v1/authenticated/index.js index d0303a900..ad2522939 100644 --- a/src/routes/v1/authenticated/index.js +++ b/src/routes/v1/authenticated/index.js @@ -13,8 +13,8 @@ const getAuthenticatedSubrouter = ({ const authenticatedSubrouter = express.Router({ mergeParams: true }) - authenticatedSubrouter.use(authenticationMiddleware.verifyJwt) - // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username + authenticatedSubrouter.use(authenticationMiddleware.verifyAccess) + // NOTE: apiLogger needs to be after `verifyAccess` as it logs the github username // which is only available after verifying that the jwt is valid authenticatedSubrouter.use(apiLogger) authenticatedSubrouter.use("/sites", sitesRouter) diff --git a/src/routes/v1/authenticatedSites/collectionPages.js b/src/routes/v1/authenticatedSites/collectionPages.js index 61ea1873c..3c0f6c2b6 100644 --- a/src/routes/v1/authenticatedSites/collectionPages.js +++ b/src/routes/v1/authenticatedSites/collectionPages.js @@ -1,7 +1,6 @@ const Bluebird = require("bluebird") const express = require("express") const _ = require("lodash") -const yaml = require("yaml") // Import errors const { NotFoundError } = require("@errors/NotFoundError") @@ -20,6 +19,7 @@ const { File, CollectionPageType } = require("@classes/File") // Import utils const { readCollectionPageUtilFunc } = require("@utils/route-utils") +const { sanitizedYamlParse } = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -65,7 +65,9 @@ async function listCollectionPagesDetails(req, res) { collectionName, page.fileName ) - const frontMatter = yaml.parse(Base64.decode(content).split("---")[1]) + const frontMatter = sanitizedYamlParse( + Base64.decode(content).split("---")[1] + ) return { fileName: page.fileName, title: frontMatter.title, diff --git a/src/routes/v1/authenticatedSites/collections.js b/src/routes/v1/authenticatedSites/collections.js index 
a0c53af3f..40399e87d 100644 --- a/src/routes/v1/authenticatedSites/collections.js +++ b/src/routes/v1/authenticatedSites/collections.js @@ -1,5 +1,4 @@ const express = require("express") -const yaml = require("yaml") // Import middleware const { @@ -14,6 +13,10 @@ const { File, CollectionPageType, PageType } = require("@classes/File") const { Subfolder } = require("@classes/Subfolder") const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -148,13 +151,13 @@ async function moveFiles(req, res) { const [unused, encodedFrontMatter, pageContent] = Base64.decode( content ).split("---") - const frontMatter = yaml.parse(encodedFrontMatter) + const frontMatter = sanitizedYamlParse(encodedFrontMatter) if (targetSubfolderName) frontMatter.third_nav_title = deslugifyCollectionName( targetSubfolderName ) else delete frontMatter.third_nav_title - const newFrontMatter = yaml.stringify(frontMatter) + const newFrontMatter = sanitizedYamlStringify(frontMatter) const newContent = ["---\n", newFrontMatter, "---", pageContent].join("") const newEncodedContent = Base64.encode(newContent) await newIsomerFile.create(fileName, newEncodedContent) diff --git a/src/routes/v1/authenticatedSites/folders.js b/src/routes/v1/authenticatedSites/folders.js index 2e6d7fed9..53d26b200 100644 --- a/src/routes/v1/authenticatedSites/folders.js +++ b/src/routes/v1/authenticatedSites/folders.js @@ -1,6 +1,5 @@ const Bluebird = require("bluebird") const express = require("express") -const yaml = require("yaml") const { attachReadRouteHandlerWrapper, @@ -13,6 +12,10 @@ const { CollectionConfig } = require("@classes/Config") const { File, CollectionPageType } = require("@classes/File") const { getTree, sendTree, deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -113,7 +116,7 @@ async function renameSubfolder(req, res) { const decodedContent = Base64.decode(content) const results = decodedContent.split("---") - const frontMatter = yaml.parse(results[1]) // get the front matter as an object + const frontMatter = sanitizedYamlParse(results[1]) // get the front matter as an object const mdBody = results.slice(2).join("---") // Modify `third_nav_title` and save as new file in newSubfolderName @@ -124,7 +127,7 @@ async function renameSubfolder(req, res) { const newContent = [ "---\n", - yaml.stringify(newFrontMatter), + sanitizedYamlStringify(newFrontMatter), "---\n", mdBody, ].join("") diff --git a/src/routes/v1/authenticatedSites/index.js b/src/routes/v1/authenticatedSites/index.js index 9015a6662..e9d78524d 100644 --- a/src/routes/v1/authenticatedSites/index.js +++ b/src/routes/v1/authenticatedSites/index.js @@ -25,7 +25,7 @@ const getAuthenticatedSitesSubrouter = ({ }) => { const authenticatedSitesSubrouter = express.Router({ mergeParams: true }) - authenticatedSitesSubrouter.use(authenticationMiddleware.verifyJwt) + authenticatedSitesSubrouter.use(authenticationMiddleware.verifyAccess) authenticatedSitesSubrouter.use(attachSiteHandler) // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username // which is only available after verifying that the jwt is valid diff --git a/src/routes/v1/authenticatedSites/navigation.js b/src/routes/v1/authenticatedSites/navigation.js index d6d744560..6830e667e 100644 --- 
a/src/routes/v1/authenticatedSites/navigation.js +++ b/src/routes/v1/authenticatedSites/navigation.js @@ -1,5 +1,9 @@ const express = require("express") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -27,7 +31,7 @@ async function getNavigation(req, res) { return res.status(200).json({ sha, - content: yaml.parse(Base64.decode(content)), + content: sanitizedYamlParse(Base64.decode(content)), }) } @@ -43,7 +47,7 @@ async function updateNavigation(req, res) { IsomerFile.setFileType(dataType) await IsomerFile.update( NAVIGATION_PATH, - Base64.encode(yaml.stringify(content)), + Base64.encode(sanitizedYamlStringify(content)), sha ) diff --git a/src/routes/v1/authenticatedSites/pages.js b/src/routes/v1/authenticatedSites/pages.js index 8562d5ca3..8543300fe 100644 --- a/src/routes/v1/authenticatedSites/pages.js +++ b/src/routes/v1/authenticatedSites/pages.js @@ -1,5 +1,4 @@ const express = require("express") -const yaml = require("yaml") // Import middleware const { @@ -15,6 +14,10 @@ const { File, PageType, CollectionPageType } = require("@classes/File") const { Subfolder } = require("@classes/Subfolder") const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const router = express.Router({ mergeParams: true }) @@ -197,9 +200,9 @@ async function moveUnlinkedPages(req, res) { const [unused, encodedFrontMatter, pageContent] = Base64.decode( content ).split("---") - const frontMatter = yaml.parse(encodedFrontMatter) + const frontMatter = sanitizedYamlParse(encodedFrontMatter) frontMatter.third_nav_title = deslugifyCollectionName(targetSubfolderName) - const newFrontMatter = yaml.stringify(frontMatter) + const newFrontMatter = sanitizedYamlStringify(frontMatter) const newContent = ["---\n", newFrontMatter, "---", pageContent].join("") const newEncodedContent = Base64.encode(newContent) await newIsomerFile.create(fileName, newEncodedContent) diff --git a/src/routes/v2/__tests__/Auth.spec.js b/src/routes/v2/__tests__/Auth.spec.js index 845d3c73a..93711e03b 100644 --- a/src/routes/v2/__tests__/Auth.spec.js +++ b/src/routes/v2/__tests__/Auth.spec.js @@ -1,31 +1,52 @@ const express = require("express") +const session = require("express-session") const request = require("supertest") const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") const { generateRouter } = require("@fixtures/app") const { mockUserSessionData, mockEmail } = require("@fixtures/sessionData") +const { rateLimiter } = require("@root/services/utilServices/RateLimiter") const { CSRF_COOKIE_NAME, COOKIE_NAME, AuthRouter } = require("../auth") const { FRONTEND_URL } = process.env const csrfState = "csrfState" const cookieToken = "cookieToken" +const MOCK_USER_ID = "userId" describe("Unlinked Pages Router", () => { + jest.mock("@logger/logger", { + info: jest.fn(), + }) + const mockAuthService = { getAuthRedirectDetails: jest.fn(), - getGithubAuthToken: jest.fn(), + getUserInfoFromGithubAuth: jest.fn(), getUserInfo: jest.fn(), sendOtp: jest.fn(), verifyOtp: jest.fn(), } + const mockAuthenticationMiddleware = { + verifyJwt: jest.fn().mockImplementation((req, res, next) => next()), + } const router = new AuthRouter({ authService: mockAuthService, + authenticationMiddleware: mockAuthenticationMiddleware, + rateLimiter, }) const subrouter = express() + const options = { + resave: true, + 
saveUninitialized: true, + secret: "blah", + cookie: { + maxAge: 1209600000, + }, + } + subrouter.use(session(options)) // We can use read route handler here because we don't need to lock the repo subrouter.get( @@ -67,7 +88,7 @@ describe("Unlinked Pages Router", () => { const state = "state" const token = "token" it("retrieves the token and redirects back to the correct page after github auth", async () => { - mockAuthService.getGithubAuthToken.mockResolvedValueOnce({ + mockAuthService.getUserInfoFromGithubAuth.mockResolvedValueOnce({ token, }) @@ -75,16 +96,14 @@ describe("Unlinked Pages Router", () => { .get(`/?code=${code}&state=${state}`) .set("Cookie", `${CSRF_COOKIE_NAME}=${csrfState};`) - expect(mockAuthService.getGithubAuthToken).toHaveBeenCalledWith({ + expect(mockAuthService.getUserInfoFromGithubAuth).toHaveBeenCalledWith({ csrfState, code, state, }) expect(resp.status).toEqual(302) expect(resp.headers.location).toContain(`${FRONTEND_URL}/sites`) - expect(resp.headers["set-cookie"]).toEqual( - expect.arrayContaining([expect.stringContaining(COOKIE_NAME)]) - ) + expect(resp.headers["set-cookie"]).toBeTruthy() }) }) describe("login", () => { @@ -97,6 +116,9 @@ describe("Unlinked Pages Router", () => { }) describe("verify", () => { const mockOtp = "123456" + mockAuthService.verifyOtp.mockImplementationOnce(() => ({ + email: mockEmail, + })) it("adds the cookie on login", async () => { mockAuthService.getAuthRedirectDetails.mockResolvedValueOnce(cookieToken) await request(app) @@ -126,10 +148,9 @@ describe("Unlinked Pages Router", () => { }) describe("whoami", () => { - const userId = "userId" it("returns user info if found", async () => { const expectedResponse = { - userId, + userId: MOCK_USER_ID, } mockAuthService.getUserInfo.mockResolvedValueOnce(expectedResponse) diff --git a/src/routes/v2/auth.js b/src/routes/v2/auth.js index 59ae0ab34..076df60a2 100644 --- a/src/routes/v2/auth.js +++ b/src/routes/v2/auth.js @@ -1,6 +1,8 @@ const autoBind = require("auto-bind") const express = require("express") +const logger = require("@logger/logger") + // Import middleware const { attachReadRouteHandlerWrapper } = require("@middleware/routeHandler") @@ -16,10 +18,11 @@ const CSRF_COOKIE_NAME = "isomer-csrf" const COOKIE_NAME = "isomercms" class AuthRouter { - constructor({ authService, authenticationMiddleware, apiLogger }) { + constructor({ authService, authenticationMiddleware, apiLogger, rateLimiter }) { this.authService = authService this.authenticationMiddleware = authenticationMiddleware this.apiLogger = apiLogger + this.rateLimiter = rateLimiter // We need to bind all methods because we don't invoke them from the class directly autoBind(this) } @@ -54,52 +57,42 @@ class AuthRouter { const csrfState = req.cookies[CSRF_COOKIE_NAME] const { code, state } = req.query - const token = await this.authService.getGithubAuthToken({ + const userInfo = await this.authService.getUserInfoFromGithubAuth({ csrfState, code, state, }) - const authTokenExpiry = new Date() - // getTime allows this to work across timezones - authTokenExpiry.setTime(authTokenExpiry.getTime() + AUTH_TOKEN_EXPIRY_MS) - const cookieSettings = { - path: "/", - expires: authTokenExpiry, - httpOnly: true, - sameSite: true, - secure: isSecure(), - } - res.cookie(COOKIE_NAME, token, cookieSettings) + logger.info(`User ${userInfo.email} successfully logged in`) + Object.assign(req.session, { userInfo }) return res.redirect(`${FRONTEND_URL}/sites`) } async login(req, res) { const { email: rawEmail } = req.body const email = 
rawEmail.toLowerCase() - await this.authService.sendOtp(email) + try { + await this.authService.sendOtp(email) + } catch (err) { + // Log, but don't return so responses are indistinguishable + logger.error( + `Error occurred when attempting to login user ${email}: ${err}` + ) + } return res.sendStatus(200) } async verify(req, res) { const { email: rawEmail, otp } = req.body const email = rawEmail.toLowerCase() - const token = await this.authService.verifyOtp({ email, otp }) - const authTokenExpiry = new Date() - // getTime allows this to work across timezones - authTokenExpiry.setTime(authTokenExpiry.getTime() + AUTH_TOKEN_EXPIRY_MS) - const cookieSettings = { - path: "/", - expires: authTokenExpiry, - httpOnly: true, - sameSite: true, - secure: isSecure(), - } - res.cookie(COOKIE_NAME, token, cookieSettings) + const userInfo = await this.authService.verifyOtp({ email, otp }) + Object.assign(req.session, { userInfo }) + logger.info(`User ${userInfo.email} successfully logged in`) return res.sendStatus(200) } async logout(req, res) { this.clearIsomerCookies(res) + req.session.destroy() return res.sendStatus(200) } @@ -109,6 +102,7 @@ class AuthRouter { const userInfo = await this.authService.getUserInfo(userSessionData) if (!userInfo) { this.clearIsomerCookies(res) + req.session.destroy() return res.sendStatus(401) } return res.status(200).json(userInfo) @@ -118,6 +112,7 @@ class AuthRouter { const router = express.Router() router.use(this.apiLogger) + router.use(this.rateLimiter) router.get( "/github-redirect", attachReadRouteHandlerWrapper(this.authRedirect) @@ -128,7 +123,7 @@ class AuthRouter { router.delete("/logout", attachReadRouteHandlerWrapper(this.logout)) router.get( "/whoami", - this.authenticationMiddleware.verifyJwt, + this.authenticationMiddleware.verifyAccess, attachReadRouteHandlerWrapper(this.whoami) ) diff --git a/src/routes/v2/authenticated/index.js b/src/routes/v2/authenticated/index.js index 435fc03d6..287731070 100644 --- a/src/routes/v2/authenticated/index.js +++ b/src/routes/v2/authenticated/index.js @@ -43,7 +43,7 @@ const getAuthenticatedSubrouter = ({ const authenticatedSubrouter = express.Router({ mergeParams: true }) - authenticatedSubrouter.use(authenticationMiddleware.verifyJwt) + authenticatedSubrouter.use(authenticationMiddleware.verifyAccess) // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username // which is only available after verifying that the jwt is valid authenticatedSubrouter.use(apiLogger) diff --git a/src/routes/v2/authenticated/review.ts b/src/routes/v2/authenticated/review.ts index 2191ed03d..dcbb28a76 100644 --- a/src/routes/v2/authenticated/review.ts +++ b/src/routes/v2/authenticated/review.ts @@ -12,7 +12,7 @@ import { import UserSessionData from "@classes/UserSessionData" import UserWithSiteSessionData from "@classes/UserWithSiteSessionData" -import { CollaboratorRoles } from "@root/constants" +import { CollaboratorRoles, ReviewRequestStatus } from "@root/constants" import { SiteMember, User } from "@root/database/models" import CollaboratorsService from "@root/services/identity/CollaboratorsService" import NotificationsService from "@root/services/identity/NotificationsService" @@ -541,7 +541,9 @@ export class ReviewsRouter { // is an identity feature, we assume that **all** users calling this endpoint // will have a valid email (guaranteed by our modal) collaborator.email && - !!collaboratorMappings[collaborator.email] + !!collaboratorMappings[collaborator.email] && + // NOTE: Prevent the requestor from adding 
themselves as a reviewer + collaborator.email !== requestor.email ) if (verifiedReviewers.length !== reviewers.length) { @@ -677,7 +679,11 @@ export class ReviewsRouter { requestId ) - if (isIsomerError(possibleReviewRequest)) { + if ( + isIsomerError(possibleReviewRequest) || + // NOTE: Only allow approving review requests that are currently open + possibleReviewRequest.reviewStatus !== ReviewRequestStatus.Open + ) { logger.error({ message: "Invalid review request requested", method: "approveReviewRequest", @@ -697,7 +703,11 @@ export class ReviewsRouter { const { reviewers } = possibleReviewRequest const isReviewer = _.some( reviewers, - (user) => user.email === userWithSiteSessionData.email + (user) => + user.email === userWithSiteSessionData.email && + // NOTE: Check that the reviewer's email is not the requestor's email + // in order to prevent self approvals + user.email !== possibleReviewRequest.requestor.email ) if (!isReviewer) { diff --git a/src/routes/v2/authenticated/users.ts b/src/routes/v2/authenticated/users.ts index fba0d19e4..2b1467566 100644 --- a/src/routes/v2/authenticated/users.ts +++ b/src/routes/v2/authenticated/users.ts @@ -68,7 +68,9 @@ export class UsersRouter { const { email, otp } = req.body const { userSessionData } = res.locals const userId = userSessionData.isomerUserId - if (!this.usersService.verifyOtp(email, otp)) { + + const isOtpValid = await this.usersService.verifyEmailOtp(email, otp) + if (!isOtpValid) { throw new BadRequestError("Invalid OTP") } @@ -100,7 +102,9 @@ export class UsersRouter { const { mobile, otp } = req.body const { userSessionData } = res.locals const userId = userSessionData.isomerUserId - if (!this.usersService.verifyOtp(mobile, otp)) { + + const isOtpValid = await this.usersService.verifyMobileOtp(mobile, otp) + if (!isOtpValid) { throw new BadRequestError("Invalid OTP") } diff --git a/src/routes/v2/authenticatedSites/index.js b/src/routes/v2/authenticatedSites/index.js index 539ae3a15..0add03c75 100644 --- a/src/routes/v2/authenticatedSites/index.js +++ b/src/routes/v2/authenticatedSites/index.js @@ -190,7 +190,7 @@ const getAuthenticatedSitesSubrouter = ({ const authenticatedSitesSubrouter = express.Router({ mergeParams: true }) - authenticatedSitesSubrouter.use(authenticationMiddleware.verifyJwt) + authenticatedSitesSubrouter.use(authenticationMiddleware.verifyAccess) authenticatedSitesSubrouter.use(attachSiteHandler) // NOTE: apiLogger needs to be after `verifyJwt` as it logs the github username // which is only available after verifying that the jwt is valid diff --git a/src/server.js b/src/server.js index 4d01c7446..46b5436bd 100644 --- a/src/server.js +++ b/src/server.js @@ -1,5 +1,8 @@ import "dd-trace/init" import "module-alias/register" +import SequelizeStoreFactory from "connect-session-sequelize" +import session from "express-session" +import nocache from "nocache" import logger from "@logger/logger" @@ -11,6 +14,7 @@ import { Whitelist, AccessToken, Repo, + Otp, Deployment, Launch, Redirection, @@ -51,15 +55,22 @@ import getAuthenticatedSitesSubrouter from "./routes/v2/authenticatedSites" import CollaboratorsService from "./services/identity/CollaboratorsService" import LaunchClient from "./services/identity/LaunchClient" import LaunchesService from "./services/identity/LaunchesService" +import { rateLimiter } from "./services/utilServices/RateLimiter" const path = require("path") +const AUTH_TOKEN_EXPIRY_MS = parseInt( + process.env.AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS, + 10 +) + const sequelize = 
initSequelize([ Site, SiteMember, User, Whitelist, AccessToken, + Otp, Repo, Deployment, Launch, @@ -79,6 +90,30 @@ const express = require("express") const helmet = require("helmet") const createError = require("http-errors") +const isSecure = + process.env.NODE_ENV !== "DEV" && + process.env.NODE_ENV !== "LOCAL_DEV" && + process.env.NODE_ENV !== "test" + +const SequelizeStore = SequelizeStoreFactory(session.Store) +const sessionMiddleware = session({ + store: new SequelizeStore({ + db: sequelize, + tableName: "sessions", + checkExpirationInterval: 15 * 60 * 1000, // Checks expired sessions every 15 minutes + }), + resave: false, // can set to false since touch is implemented by our store + saveUninitialized: false, // do not save new sessions that have not been modified + cookie: { + httpOnly: true, + sameSite: "strict", + secure: isSecure, + maxAge: AUTH_TOKEN_EXPIRY_MS, + }, + secret: process.env.SESSION_SECRET, + name: "isomer", +}) + // Env vars const { FRONTEND_URL } = process.env // Import middleware @@ -205,7 +240,12 @@ const authenticatedSitesSubrouterV2 = getAuthenticatedSitesSubrouter({ notificationsService, notificationOnEditHandler, }) -const authV2Router = new AuthRouter({ authenticationMiddleware, authService, apiLogger }) +const authV2Router = new AuthRouter({ + authenticationMiddleware, + authService, + apiLogger, + rateLimiter, +}) const formsgRouter = new FormsgRouter({ usersService, infraService }) const formsgSiteLaunchRouter = new FormsgSiteLaunchRouter({ usersService, @@ -213,6 +253,12 @@ const formsgSiteLaunchRouter = new FormsgSiteLaunchRouter({ }) const app = express() + +if (isSecure) { + // Our server only receives requests from the alb reverse proxy, so we need to use the client IP provided in X-Forwarded-For + // This is trusted because our security groups block all other access to the server + app.set("trust proxy", true) +} app.use(helmet()) app.use( @@ -225,6 +271,9 @@ app.use(express.json({ limit: "7mb" })) app.use(express.urlencoded({ extended: false })) app.use(cookieParser()) app.use(express.static(path.join(__dirname, "public"))) +app.use(nocache()) + +app.use(sessionMiddleware) // Health endpoint app.use("/v2/ping", (req, res, next) => res.status(200).send("Ok")) diff --git a/src/services/fileServices/MdPageServices/MediaFileService.js b/src/services/fileServices/MdPageServices/MediaFileService.js index 132377c94..8251cd3aa 100644 --- a/src/services/fileServices/MdPageServices/MediaFileService.js +++ b/src/services/fileServices/MdPageServices/MediaFileService.js @@ -1,3 +1,5 @@ +const logger = require("@logger/logger") + const { BadRequestError } = require("@errors/BadRequestError") const { MediaTypeError } = require("@errors/MediaTypeError") @@ -6,6 +8,7 @@ const { GITHUB_ORG_NAME } = process.env const { validateAndSanitizeFileUpload, ALLOWED_FILE_EXTENSIONS, + scanFileForVirus, } = require("@utils/file-upload-utils") const { isMediaPathValid } = require("@validators/validators") @@ -26,6 +29,18 @@ class MediaFileService { async create(sessionData, { fileName, directoryName, content }) { this.mediaNameChecks({ directoryName, fileName }) + + const [, fileContent] = content.split(",") + const fileBuffer = Buffer.from(fileContent, "base64") + + // Scan file for virus - cloudmersive API + const virusScanRes = await scanFileForVirus(fileBuffer) + logger.info(`File scan result: ${virusScanRes.CleanResult}`) + if (!virusScanRes || !virusScanRes.CleanResult) { + throw new BadRequestError("File did not pass virus scan") + } + + // Sanitize and validate file 
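The `create` handler above calls a `scanFileForVirus` helper imported from `@utils/file-upload-utils`, but that helper's implementation is not part of this section of the diff. Below is a rough sketch of what such a helper could look like, assuming the Cloudmersive virus-scan REST endpoint and the `CLOUDMERSIVE_API_KEY` env var added in `.env-example`; the endpoint URL, `Apikey` header, and `inputFile` form field are assumptions based on Cloudmersive's public documentation (the real code may equally use the official `cloudmersive-virus-api-client` SDK), and only the `CleanResult` field is relied on by the handler above.

```ts
import axios from "axios"
import FormData from "form-data"

const { CLOUDMERSIVE_API_KEY } = process.env

// Sketch only: POST the uploaded file buffer to Cloudmersive's scan endpoint and
// return the parsed response, which is expected to carry a `CleanResult` boolean.
export const scanFileForVirus = async (fileBuffer: Buffer) => {
  const form = new FormData()
  form.append("inputFile", fileBuffer, { filename: "upload" })

  const { data } = await axios.post(
    "https://api.cloudmersive.com/virus/scan/file", // assumed endpoint
    form,
    { headers: { ...form.getHeaders(), Apikey: CLOUDMERSIVE_API_KEY } }
  )
  return data // e.g. { CleanResult: true, FoundViruses: null }
}
```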
const sanitizedContent = await validateAndSanitizeFileUpload(content) if (!sanitizedContent) { throw new MediaTypeError(`File extension is not within the approved list`) diff --git a/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js b/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js index a6e28ff5d..c032418d2 100644 --- a/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js +++ b/src/services/fileServices/MdPageServices/__tests__/MediaFileService.spec.js @@ -8,7 +8,7 @@ describe("Media File Service", () => { const imageName = "test image.png" const fileName = "test file.pdf" const directoryName = "images/subfolder" - const mockContent = "test" + const mockContent = "schema, test" const mockSanitizedContent = "sanitized-test" const sha = "12345" const mockGithubSessionData = "githubData" @@ -33,6 +33,7 @@ describe("Media File Service", () => { .fn() .mockReturnValue(mockSanitizedContent), ALLOWED_FILE_EXTENSIONS: ["pdf"], + scanFileForVirus: jest.fn().mockReturnValue({ CleanResult: true }), })) const { diff --git a/src/services/fileServices/YmlFileServices/CollectionYmlService.js b/src/services/fileServices/YmlFileServices/CollectionYmlService.js index 8cd4e9500..1dd63433b 100644 --- a/src/services/fileServices/YmlFileServices/CollectionYmlService.js +++ b/src/services/fileServices/YmlFileServices/CollectionYmlService.js @@ -1,5 +1,9 @@ const _ = require("lodash") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const COLLECTION_FILE_NAME = "collection.yml" @@ -16,12 +20,12 @@ class CollectionYmlService { directoryName: `_${collectionName}`, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } async update(sessionData, { collectionName, fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) + const stringifiedContent = sanitizedYamlStringify(fileContent) const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, @@ -40,7 +44,7 @@ class CollectionYmlService { }, }, } - const stringifiedContent = yaml.stringify(contentObject) + const stringifiedContent = sanitizedYamlStringify(contentObject) return this.gitHubService.create(sessionData, { content: stringifiedContent, fileName: COLLECTION_FILE_NAME, @@ -176,7 +180,7 @@ class CollectionYmlService { }, }, } - const stringifiedContent = yaml.stringify(contentObject) + const stringifiedContent = sanitizedYamlStringify(contentObject) return this.gitHubService.update(sessionData, { directoryName: `_${collectionName}`, fileContent: stringifiedContent, diff --git a/src/services/fileServices/YmlFileServices/ConfigYmlService.js b/src/services/fileServices/YmlFileServices/ConfigYmlService.js index a980581fa..58e0ba269 100644 --- a/src/services/fileServices/YmlFileServices/ConfigYmlService.js +++ b/src/services/fileServices/YmlFileServices/ConfigYmlService.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const CONFIG_FILE_NAME = "_config.yml" @@ -14,12 +17,12 @@ class ConfigYmlService { fileName: CONFIG_FILE_NAME, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } async update(reqDetails, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) + const stringifiedContent = 
sanitizedYamlStringify(fileContent) const { newSha } = await this.gitHubService.update(reqDetails, { fileContent: stringifiedContent, sha, diff --git a/src/services/fileServices/YmlFileServices/FooterYmlService.js b/src/services/fileServices/YmlFileServices/FooterYmlService.js index 101d98651..ce0f4616d 100644 --- a/src/services/fileServices/YmlFileServices/FooterYmlService.js +++ b/src/services/fileServices/YmlFileServices/FooterYmlService.js @@ -1,4 +1,7 @@ -const yaml = require("yaml") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const FOOTER_FILE_NAME = "footer.yml" const FOOTER_FILE_DIR = "_data" @@ -16,12 +19,12 @@ class FooterYmlService { directoryName: FOOTER_FILE_DIR, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } async update(sessionData, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) + const stringifiedContent = sanitizedYamlStringify(fileContent) const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, diff --git a/src/services/fileServices/YmlFileServices/NavYmlService.js b/src/services/fileServices/YmlFileServices/NavYmlService.js index 7b14c87aa..5ea2385f3 100644 --- a/src/services/fileServices/YmlFileServices/NavYmlService.js +++ b/src/services/fileServices/YmlFileServices/NavYmlService.js @@ -1,6 +1,8 @@ -const yaml = require("yaml") - const { deslugifyCollectionName } = require("@utils/utils") +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const NAV_FILE_NAME = "navigation.yml" const NAV_FILE_DIR = "_data" @@ -18,12 +20,12 @@ class NavYmlService { directoryName: NAV_FILE_DIR, } ) - const content = yaml.parse(unparsedContent) + const content = sanitizedYamlParse(unparsedContent) return { content, sha } } async update(sessionData, { fileContent, sha }) { - const stringifiedContent = yaml.stringify(fileContent) + const stringifiedContent = sanitizedYamlStringify(fileContent) const { newSha } = await this.gitHubService.update(sessionData, { fileContent: stringifiedContent, sha, diff --git a/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js b/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js index aac57b626..4205faf20 100644 --- a/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js +++ b/src/services/fileServices/YmlFileServices/__tests__/CollectionYmlService.spec.js @@ -3,7 +3,8 @@ const { } = require("@services/fileServices/YmlFileServices/CollectionYmlService") const COLLECTION_FILE_NAME = "collection.yml" -const yaml = require("yaml") +const { sanitizedYamlStringify } = require("@utils/yaml-utils") + const _ = require("lodash") describe("Collection Yml Service", () => { @@ -33,7 +34,7 @@ describe("Collection Yml Service", () => { }, }, } - const mockRawContent = yaml.stringify(mockParsedContent) + const mockRawContent = sanitizedYamlStringify(mockParsedContent) const mockGithubService = { create: jest.fn(), @@ -92,7 +93,7 @@ describe("Collection Yml Service", () => { mockGithubService.create.mockResolvedValueOnce({ sha }) }) it("Creating a collection.yml file with no specified files works correctly", async () => { - const content = yaml.stringify({ + const content = sanitizedYamlStringify({ collections: { [collectionName]: { output: true, @@ -114,7 +115,7 @@ describe("Collection Yml Service", () => { }) }) it("Creating a 
collection.yml file with specified files works correctly", async () => { - const content = yaml.stringify({ + const content = sanitizedYamlStringify({ collections: { [collectionName]: { output: true, @@ -167,7 +168,7 @@ describe("Collection Yml Service", () => { const expectedArray = [newFileName, ...orderArray] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -189,7 +190,7 @@ describe("Collection Yml Service", () => { const expectedArray = [newFileName, ...orderArray] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -212,7 +213,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(1, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -236,7 +237,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(addedIndex, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -261,7 +262,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(addedIndex, 0, newFileName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.addItemToOrder(reqDetails, { collectionName, @@ -296,7 +297,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteItemFromOrder(reqDetails, { collectionName, @@ -318,7 +319,7 @@ describe("Collection Yml Service", () => { const expectedArray = orderArray.filter((item) => item !== itemName) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteItemFromOrder(reqDetails, { collectionName, @@ -366,7 +367,7 @@ describe("Collection Yml Service", () => { expectedArray.splice(index, 0, renamedItem) const modifiedParsedContent = _.cloneDeep(mockParsedContent) 
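The hunks above and below swap direct `yaml.parse` / `yaml.stringify` calls for `sanitizedYamlParse` / `sanitizedYamlStringify` from `@utils/yaml-utils`, a module that is not itself shown in this diff. One plausible shape for it is sketched here, assuming it wraps the existing `yaml` package and runs content through DOMPurify (consistent with the `@types/dompurify` dependency added in `package-lock.json`); the choice of `isomorphic-dompurify` and the exact sanitisation rules are assumptions.

```ts
import DOMPurify from "isomorphic-dompurify"
import yaml from "yaml"

// Sketch only: strip HTML/script payloads from raw YAML before parsing, and
// sanitise again when serialising objects back to a YAML string.
export const sanitizedYamlParse = (unparsedContent: string): unknown =>
  yaml.parse(DOMPurify.sanitize(unparsedContent))

export const sanitizedYamlStringify = (prettifiedContent: object): string =>
  DOMPurify.sanitize(yaml.stringify(prettifiedContent))
```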
modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.updateItemInOrder(reqDetails, { collectionName, @@ -400,7 +401,7 @@ describe("Collection Yml Service", () => { [renamedCollection]: mockParsedContent.collections[collectionName], }, } - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.renameCollectionInOrder(reqDetails, { oldCollectionName: collectionName, @@ -432,7 +433,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.deleteSubfolderFromOrder(reqDetails, { collectionName, @@ -465,7 +466,7 @@ describe("Collection Yml Service", () => { ) const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = expectedArray - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.renameSubfolderInOrder(reqDetails, { collectionName, @@ -502,7 +503,7 @@ describe("Collection Yml Service", () => { ] const modifiedParsedContent = _.cloneDeep(mockParsedContent) modifiedParsedContent.collections[collectionName].order = newOrder - const modifiedRawContent = yaml.stringify(modifiedParsedContent) + const modifiedRawContent = sanitizedYamlStringify(modifiedParsedContent) await expect( service.updateOrder(reqDetails, { collectionName, diff --git a/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js b/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js index 5a9be608b..27e212f14 100644 --- a/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js +++ b/src/services/fileServices/YmlFileServices/__tests__/NavYmlService.spec.js @@ -12,7 +12,8 @@ const { const NAV_FILE_NAME = "navigation.yml" const NAV_FILE_DIR = "_data" -const yaml = require("yaml") +const { sanitizedYamlStringify } = require("@utils/yaml-utils") + const _ = require("lodash") describe("Nav Yml Service", () => { @@ -142,7 +143,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) @@ -182,7 +183,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) @@ -214,7 +215,7 @@ describe("Nav Yml Service", () => { expect(mockGithubService.update).toHaveBeenCalledWith(reqDetails, { fileName, directoryName, - fileContent: yaml.stringify(updatedMockParsedContent), + fileContent: sanitizedYamlStringify(updatedMockParsedContent), sha: mockNavigationSha, }) }) diff --git a/src/services/identity/CollaboratorsService.ts b/src/services/identity/CollaboratorsService.ts index df09946a8..c9b8b1efb 100644 --- 
a/src/services/identity/CollaboratorsService.ts +++ b/src/services/identity/CollaboratorsService.ts @@ -90,6 +90,10 @@ class CollaboratorsService { { model: User, as: "site_members", + attributes: { + // Hide PII such as contactNumber + exclude: ["contactNumber"], + }, }, { model: Repo, diff --git a/src/services/identity/OtpService.ts b/src/services/identity/OtpService.ts new file mode 100644 index 000000000..3583f997e --- /dev/null +++ b/src/services/identity/OtpService.ts @@ -0,0 +1,28 @@ +import crypto from "crypto" + +import bcrypt from "bcrypt" + +const SALT_TIMES = 10 +const TOTP_LENGTH = 6 + +class OtpService { + private generateOtp = (): string => + // Generates cryptographically strong pseudo-random data. + Array(TOTP_LENGTH) + .fill(0) + .map(() => crypto.randomInt(0, 10)) + .join("") + + generateLoginOtpWithHash = async () => { + const otp = this.generateOtp() + const hashedOtp = await bcrypt.hash(otp, SALT_TIMES) + return { otp, hashedOtp } + } + + verifyOtp = async (otp: string, hashedOtp: string): Promise => { + if (!otp || !hashedOtp) return false + return bcrypt.compare(otp, hashedOtp) + } +} + +export default OtpService diff --git a/src/services/identity/SitesService.ts b/src/services/identity/SitesService.ts index 2361070d6..88a040060 100644 --- a/src/services/identity/SitesService.ts +++ b/src/services/identity/SitesService.ts @@ -1,7 +1,7 @@ import _ from "lodash" import { ModelStatic } from "sequelize" -import { Deployment, Site, Repo } from "@database/models" +import { Deployment, Repo, Site } from "@database/models" import type UserSessionData from "@root/classes/UserSessionData" import type UserWithSiteSessionData from "@root/classes/UserWithSiteSessionData" import { @@ -68,9 +68,9 @@ class SitesService { this.reviewRequestService = reviewRequestService } - isGitHubCommitData(commit: any): commit is GitHubCommitData { + isGitHubCommitData(commit: unknown): commit is GitHubCommitData { return ( - commit && + !!commit && (commit as GitHubCommitData).author !== undefined && (commit as GitHubCommitData).author.name !== undefined && (commit as GitHubCommitData).author.date !== undefined && diff --git a/src/services/identity/SmsClient.ts b/src/services/identity/SmsClient.ts index b70f5eff0..d4a88fb2c 100644 --- a/src/services/identity/SmsClient.ts +++ b/src/services/identity/SmsClient.ts @@ -36,7 +36,7 @@ class SmsClient { try { await this.axiosClient.post(endpoint, sms) } catch (err) { - logger.error(err) + logger.error(`Failed to send SMS to ${recipient}: ${err}`) throw new Error("Failed to send SMS.") } } diff --git a/src/services/identity/UsersService.ts b/src/services/identity/UsersService.ts index 2af2782ba..c15919845 100644 --- a/src/services/identity/UsersService.ts +++ b/src/services/identity/UsersService.ts @@ -2,24 +2,40 @@ import { Op, ModelStatic } from "sequelize" import { Sequelize } from "sequelize-typescript" import { RequireAtLeastOne } from "type-fest" -import { Repo, Site, User, Whitelist, SiteMember } from "@database/models" +import { Otp, Repo, Site, User, Whitelist, SiteMember } from "@database/models" +import { BadRequestError } from "@root/errors/BadRequestError" +import { milliSecondsToMinutes } from "@root/utils/time-utils" import SmsClient from "@services/identity/SmsClient" import TotpGenerator from "@services/identity/TotpGenerator" import MailClient from "@services/utilServices/MailClient" +import OtpService from "./OtpService" + +const { OTP_EXPIRY, MAX_NUM_OTP_ATTEMPTS } = process.env + +const PARSED_EXPIRY = parseInt(OTP_EXPIRY || "", 10) 
?? undefined + +const PARSED_MAX_NUM_OTP_ATTEMPTS = + parseInt(MAX_NUM_OTP_ATTEMPTS || "", 10) ?? 5 + +enum OtpType { + Email = "EMAIL", + Mobile = "MOBILE", +} + interface UsersServiceProps { - otp: TotpGenerator mailer: MailClient smsClient: SmsClient repository: ModelStatic sequelize: Sequelize whitelist: ModelStatic + otpService: OtpService + otpRepository: ModelStatic } class UsersService { // NOTE: Explicitly specifying using keyed properties to ensure // that the types are synced. - private readonly otp: UsersServiceProps["otp"] private readonly mailer: UsersServiceProps["mailer"] @@ -31,20 +47,26 @@ class UsersService { private readonly whitelist: UsersServiceProps["whitelist"] + private readonly otpService: UsersServiceProps["otpService"] + + private readonly otpRepository: UsersServiceProps["otpRepository"] + constructor({ - otp, mailer, smsClient, repository, sequelize, whitelist, + otpService, + otpRepository, }: UsersServiceProps) { - this.otp = otp this.mailer = mailer this.smsClient = smsClient this.repository = repository this.sequelize = sequelize this.whitelist = whitelist + this.otpService = otpService + this.otpRepository = otpRepository } async findById(id: string) { @@ -196,26 +218,120 @@ class UsersService { } async sendEmailOtp(email: string) { - const otp = this.otp.generate(email) - const expiry = this.otp.getExpiryMinutes() + const { otp, hashedOtp } = await this.otpService.generateLoginOtpWithHash() + + // Reset attempts to login + const otpEntry = await this.otpRepository.findOne({ where: { email } }) + if (!otpEntry) { + // create new entry + await this.createOtpEntry(email, OtpType.Email, hashedOtp) + } else { + await otpEntry?.update({ + hashedOtp, + attempts: 0, + expiresAt: this.getOtpExpiry(), + }) + } const subject = "One-Time Password (OTP) for IsomerCMS" - const html = `

Your OTP is ${otp}. It will expire in ${expiry} minutes. Please use this to verify your email address.

+ const html = `

Your OTP is ${otp}. It will expire in ${milliSecondsToMinutes( + PARSED_EXPIRY + )} minutes. Please use this to verify your email address.

If your OTP does not work, please request for a new OTP.

IsomerCMS Support Team

` await this.mailer.sendMail(email, subject, html) } async sendSmsOtp(mobileNumber: string) { - const otp = this.otp.generate(mobileNumber) - const expiry = this.otp.getExpiryMinutes() + const { otp, hashedOtp } = await this.otpService.generateLoginOtpWithHash() + + // Reset attempts to login + const otpEntry = await this.otpRepository.findOne({ + where: { mobileNumber }, + }) + if (!otpEntry) { + await this.createOtpEntry(mobileNumber, OtpType.Mobile, hashedOtp) + } else { + await otpEntry?.update({ hashedOtp, attempts: 0 }) + } - const message = `Your OTP is ${otp}. It will expire in ${expiry} minutes. Please use this to verify your mobile number` + const message = `Your OTP is ${otp}. It will expire in ${milliSecondsToMinutes( + PARSED_EXPIRY + )} minutes. Please use this to verify your mobile number` await this.smsClient.sendSms(mobileNumber, message) } - verifyOtp(value: string, otp: string) { - return this.otp.verify(value, otp) + private async verifyOtp(otpEntry: Otp | null, otp: string) { + // TODO: Change all the following to use AuthError after FE fix + if (!otp || otp === "") { + throw new BadRequestError("Empty OTP provided") + } + + if (!otpEntry) { + throw new BadRequestError("OTP not found") + } + + if (otpEntry.attempts >= PARSED_MAX_NUM_OTP_ATTEMPTS) { + throw new BadRequestError("Max number of attempts reached") + } + + if (!otpEntry?.hashedOtp) { + await otpEntry.destroy() + throw new BadRequestError("Hashed OTP not found") + } + + // increment attempts + await otpEntry.update({ attempts: otpEntry.attempts + 1 }) + + const isValidOtp = await this.otpService.verifyOtp(otp, otpEntry.hashedOtp) + if (!isValidOtp) { + throw new BadRequestError("OTP is not valid") + } + + if (isValidOtp && otpEntry.expiresAt < new Date()) { + await otpEntry.destroy() + throw new BadRequestError("OTP has expired") + } + + // destroy otp before returning true since otp has been "used" + await otpEntry.destroy() + return true + } + + async verifyEmailOtp(email: string, otp: string) { + const otpEntry = await this.otpRepository.findOne({ where: { email } }) + return this.verifyOtp(otpEntry, otp) + } + + async verifyMobileOtp(mobileNumber: string, otp: string) { + const otpEntry = await this.otpRepository.findOne({ + where: { mobileNumber }, + }) + return this.verifyOtp(otpEntry, otp) + } + + private getOtpExpiry() { + return new Date(Date.now() + PARSED_EXPIRY) + } + + private async createOtpEntry( + key: string, + keyType: OtpType, + hashedOtp: string + ) { + if (keyType === OtpType.Email) { + await this.otpRepository.create({ + email: key, + hashedOtp, + expiresAt: this.getOtpExpiry(), + }) + } else { + await this.otpRepository.create({ + mobileNumber: key, + hashedOtp, + expiresAt: this.getOtpExpiry(), + }) + } } } diff --git a/src/services/identity/__tests__/SitesService.spec.ts b/src/services/identity/__tests__/SitesService.spec.ts index b1ec314cd..3dd67f0aa 100644 --- a/src/services/identity/__tests__/SitesService.spec.ts +++ b/src/services/identity/__tests__/SitesService.spec.ts @@ -1,6 +1,6 @@ import { ModelStatic } from "sequelize" -import { Deployment, Site, User, Repo } from "@database/models" +import { Deployment, Repo, Site, User } from "@database/models" import { MOCK_COMMIT_MESSAGE_OBJECT_ONE, MOCK_COMMIT_MESSAGE_OBJECT_TWO, diff --git a/src/services/identity/__tests__/UsersService.spec.ts b/src/services/identity/__tests__/UsersService.spec.ts index 14da61406..c0e7b0286 100644 --- a/src/services/identity/__tests__/UsersService.spec.ts +++ 
b/src/services/identity/__tests__/UsersService.spec.ts @@ -1,43 +1,53 @@ import { Sequelize } from "sequelize-typescript" import { ModelStatic } from "sequelize/types" -import { User, Whitelist } from "@root/database/models" +import { Otp, User, Whitelist } from "@root/database/models" import SmsClient from "@services/identity/SmsClient" import TotpGenerator from "@services/identity/TotpGenerator" import MailClient from "@services/utilServices/MailClient" +import OtpService from "../OtpService" import _UsersService from "../UsersService" -const MockOtp = { - generate: jest.fn(), - getExpiryMinutes: jest.fn(), - verify: jest.fn(), +const MockOtpService = { + generateLoginOtpWithHash: jest.fn(), + verifyOtp: jest.fn(), } + const MockMailer = ({ sendMail: jest.fn(), } as unknown) as MailClient + const MockSmsClient = { sendSms: jest.fn(), } + const MockRepository = { findOne: jest.fn(), update: jest.fn(), create: jest.fn(), } + const MockSequelize = { transaction: jest.fn((closure) => closure("transaction")), } + const MockWhitelist = { findAll: jest.fn(), } +const MockOtp = { + findOne: jest.fn(), +} + const UsersService = new _UsersService({ - otp: (MockOtp as unknown) as TotpGenerator, mailer: (MockMailer as unknown) as MailClient, smsClient: (MockSmsClient as unknown) as SmsClient, repository: (MockRepository as unknown) as ModelStatic, sequelize: (MockSequelize as unknown) as Sequelize, whitelist: (MockWhitelist as unknown) as ModelStatic, + otpService: (MockOtpService as unknown) as OtpService, + otpRepository: (MockOtp as unknown) as ModelStatic, }) const mockEmail = "someone@tech.gov.sg" diff --git a/src/services/identity/index.ts b/src/services/identity/index.ts index 91791e8d2..ebe3ed91a 100644 --- a/src/services/identity/index.ts +++ b/src/services/identity/index.ts @@ -8,6 +8,7 @@ import { IsomerAdmin, Notification, SiteMember, + Otp, } from "@database/models" import { GitHubService } from "@services/db/GitHubService" import SmsClient from "@services/identity/SmsClient" @@ -17,6 +18,7 @@ import { mailer } from "@services/utilServices/MailClient" import AuthService from "./AuthService" import IsomerAdminsService from "./IsomerAdminsService" import NotificationsService from "./NotificationsService" +import OtpService from "./OtpService" import UsersService from "./UsersService" const { OTP_EXPIRY, OTP_SECRET, NODE_ENV } = process.env @@ -40,16 +42,19 @@ const smsClient = IS_LOCAL_DEV } as SmsClient) : new SmsClient() +export const otpService = new OtpService() + // NOTE: This is because the usersService requires an instance of sequelize // as it requires a transaction for certain methods export const getUsersService = (sequelize: Sequelize) => new UsersService({ repository: User, - otp: totpGenerator, mailer, smsClient, sequelize, whitelist: Whitelist, + otpService, + otpRepository: Otp, }) // NOTE: This is because the identity auth service has an diff --git a/src/services/middlewareServices/AuthenticationMiddlewareService.ts b/src/services/middlewareServices/AuthenticationMiddlewareService.ts index 10796fc27..60642d8f5 100644 --- a/src/services/middlewareServices/AuthenticationMiddlewareService.ts +++ b/src/services/middlewareServices/AuthenticationMiddlewareService.ts @@ -1,4 +1,6 @@ // Import logger +import _ from "lodash" + import logger from "@logger/logger" // Import errors @@ -8,6 +10,7 @@ import jwtUtils from "@utils/jwt-utils" import { E2E_TEST_EMAIL, E2E_ISOMER_ID } from "@root/constants" import { BadRequestError } from "@root/errors/BadRequestError" +import { SessionData 
} from "@root/types/express/session" const { E2E_TEST_REPO, E2E_TEST_SECRET, E2E_TEST_GH_TOKEN } = process.env const E2E_TEST_USER = "e2e-test" @@ -18,7 +21,7 @@ const GENERAL_ACCESS_PATHS = [ "/v2/auth/whoami", ] -interface VerifyJwtProps { +type VerifyAccessProps = SessionData & { cookies: { isomercms: string isomercmsE2E?: string @@ -27,7 +30,7 @@ interface VerifyJwtProps { } export default class AuthenticationMiddlewareService { - verifyE2E({ cookies, url }: VerifyJwtProps) { + verifyE2E({ cookies, url }: Omit) { const { isomercmsE2E } = cookies const urlTokens = url.split("/") // urls take the form "/v1/sites//"" @@ -48,8 +51,7 @@ export default class AuthenticationMiddlewareService { return true } - verifyJwt({ cookies, url }: VerifyJwtProps) { - const { isomercms } = cookies + verifyAccess({ cookies, url, userInfo }: VerifyAccessProps) { const isValidE2E = this.verifyE2E({ cookies, url }) if (isValidE2E) { @@ -59,22 +61,18 @@ export default class AuthenticationMiddlewareService { const email = E2E_TEST_EMAIL return { accessToken, githubId, isomerUserId, email } } - if (!isomercms) { - logger.error(`Authentication error: JWT token expired. Url: ${url}`) - throw new AuthError(`JWT token has expired`) - } try { - const { - access_token: retrievedToken, - user_id: githubId, - isomer_user_id: isomerUserId, - email, - } = jwtUtils.verifyToken(isomercms) - if (!isomerUserId) { + if (_.isEmpty(userInfo)) { const notLoggedInError = new Error("User not logged in with email") notLoggedInError.name = "NotLoggedInError" throw notLoggedInError } + const { + accessToken: retrievedToken, + githubId, + isomerUserId, + email, + } = userInfo const accessToken = retrievedToken ? jwtUtils.decryptToken(retrievedToken) : "" diff --git a/src/services/utilServices/AuthService.js b/src/services/utilServices/AuthService.js index dfc3b4283..bb91275dc 100644 --- a/src/services/utilServices/AuthService.js +++ b/src/services/utilServices/AuthService.js @@ -18,6 +18,8 @@ const { BadRequestError } = require("@root/errors/BadRequestError") const logger = require("@root/logger/logger") const { isError } = require("@root/types") +const { OtpType } = require("../identity/UsersService") + const { CLIENT_ID, CLIENT_SECRET, REDIRECT_URI } = process.env class AuthService { @@ -34,7 +36,7 @@ class AuthService { return { redirectUrl: githubAuthUrl, cookieToken: token } } - async getGithubAuthToken({ csrfState, code, state }) { + async getUserInfoFromGithubAuth({ csrfState, code, state }) { try { const decoded = jwtUtils.verifyToken(csrfState) if (decoded.state !== state) { @@ -87,14 +89,14 @@ class AuthService { const user = await this.usersService.login(githubId) if (!user) throw Error("Failed to create user") - const token = jwtUtils.signToken({ - access_token: jwtUtils.encryptToken(accessToken), - user_id: githubId, - isomer_user_id: user.id, + const userInfo = { + accessToken: jwtUtils.encryptToken(accessToken), + githubId, + isomerUserId: user.id, email: user.email, - }) + } - return token + return userInfo } async sendOtp(email) { @@ -120,16 +122,18 @@ class AuthService { } async verifyOtp({ email, otp }) { - if (!this.usersService.verifyOtp(email, otp)) { + const isOtpValid = await this.usersService.verifyEmailOtp(email, otp) + + if (!isOtpValid) { throw new BadRequestError("You have entered an invalid OTP.") } // Create user if does not exists. Set last logged in to current time. 
const user = await this.usersService.loginWithEmail(email) - const token = jwtUtils.signToken({ - isomer_user_id: user.id, + const userInfo = { + isomerUserId: user.id, email: user.email, - }) - return token + } + return userInfo } async getUserInfo(sessionData) { diff --git a/src/services/utilServices/MailClient.ts b/src/services/utilServices/MailClient.ts index 50c519c93..82a257efa 100644 --- a/src/services/utilServices/MailClient.ts +++ b/src/services/utilServices/MailClient.ts @@ -32,7 +32,7 @@ class MailClient { }, }) } catch (err) { - logger.error(err) + logger.error(`Error occurred when sending email to ${recipient}: ${err}`) throw new Error("Failed to send email.") } } diff --git a/src/services/utilServices/RateLimiter.ts b/src/services/utilServices/RateLimiter.ts new file mode 100644 index 000000000..af96f3a7b --- /dev/null +++ b/src/services/utilServices/RateLimiter.ts @@ -0,0 +1,24 @@ +import rateLimit from "express-rate-limit" + +const DEFAULT_AUTH_TOKEN_EXPIRY_MILLISECONDS = 900000 + +// NOTE: Refer here for more information regarding the implementation: +// https://github.com/express-rate-limit/express-rate-limit +// Also, note that our production environment has 2 instances +// and the rate limiter uses an in memory store, +// so our effective limit is 100 * 2. +// This also implies that a client can hit the limit on 1 server +// but not on the other, leading to inconsistent behaviour. +// eslint-disable-next-line import/prefer-default-export +export const rateLimiter = rateLimit({ + windowMs: + parseInt( + process.env.AUTH_TOKEN_EXPIRY_DURATION_IN_MILLISECONDS || + DEFAULT_AUTH_TOKEN_EXPIRY_MILLISECONDS.toString(), + 10 + ) / + (1000 * 60), + max: 100, // Limit each IP to 100 requests per `window` (here, per 15 minutes) + standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers + legacyHeaders: false, // Disable the `X-RateLimit-*` headers +}) diff --git a/src/services/utilServices/__tests__/AuthService.spec.js b/src/services/utilServices/__tests__/AuthService.spec.js index 4c6fceabf..88982a4b1 100644 --- a/src/services/utilServices/__tests__/AuthService.spec.js +++ b/src/services/utilServices/__tests__/AuthService.spec.js @@ -22,6 +22,7 @@ const { mockGithubId: mockUserId, mockSessionDataEmailUser, } = require("@fixtures/sessionData") +const { OtpType } = require("@root/services/identity/UsersService") const { AuthService } = require("@services/utilServices/AuthService") describe("Auth Service", () => { @@ -31,7 +32,14 @@ describe("Auth Service", () => { const state = "state" const token = "token" - const signedToken = "signedToken" + const signedGithubToken = { + accessToken: token, + githubId: mockGithubId, + } + const signedEmailToken = { + email: mockEmail, + isomerUserId: mockIsomerUserId, + } const csrfState = "csrfState" const mockContactNumber = "12345678" @@ -46,7 +54,7 @@ describe("Auth Service", () => { .mockImplementation(() => ({ contactNumber: mockContactNumber })), canSendEmailOtp: jest.fn(), sendEmailOtp: jest.fn(), - verifyOtp: jest.fn(), + verifyEmailOtp: jest.fn(), loginWithEmail: jest .fn() .mockImplementation(() => ({ id: mockIsomerUserId, email: mockEmail })), @@ -69,7 +77,7 @@ describe("Auth Service", () => { }) }) - describe("getGithubAuthToken", () => { + describe("getUserInfoFromGithubAuth", () => { it("Retrieves the Github auth token", async () => { const params = { code: "code", @@ -80,7 +88,6 @@ describe("Auth Service", () => { uuid.mockImplementation(() => state) jwtUtils.verifyToken.mockImplementation(() => ({ state })) 
jwtUtils.encryptToken.mockImplementation(() => token) - jwtUtils.signToken.mockImplementation(() => signedToken) axios.post.mockImplementation(() => ({ data: `access_token=${accessToken}`, })) @@ -91,8 +98,8 @@ describe("Auth Service", () => { })) await expect( - service.getGithubAuthToken({ csrfState, code: "code", state }) - ).resolves.toEqual(signedToken) + service.getUserInfoFromGithubAuth({ csrfState, code: "code", state }) + ).resolves.toEqual(signedGithubToken) expect(axios.post).toHaveBeenCalledWith( "https://github.com/login/oauth/access_token", @@ -136,13 +143,13 @@ describe("Auth Service", () => { describe("verifyOtp", () => { const mockOtp = "123456" it("should be able to verify otp, login, and return token if correct", async () => { - mockUsersService.verifyOtp.mockImplementationOnce(() => true) - jwtUtils.signToken.mockImplementationOnce(() => signedToken) + mockUsersService.verifyEmailOtp.mockImplementationOnce(() => true) + jwtUtils.signToken.mockImplementationOnce(() => signedEmailToken) await expect( service.verifyOtp({ email: mockEmail, otp: mockOtp }) - ).resolves.toEqual(signedToken) - expect(mockUsersService.verifyOtp).toHaveBeenCalledWith( + ).resolves.toEqual(signedEmailToken) + expect(mockUsersService.verifyEmailOtp).toHaveBeenCalledWith( mockEmail, mockOtp ) @@ -150,12 +157,12 @@ describe("Auth Service", () => { }) it("should throw an error if otp is incorrect", async () => { - mockUsersService.verifyOtp.mockImplementationOnce(() => false) + mockUsersService.verifyEmailOtp.mockImplementationOnce(() => false) await expect( service.verifyOtp({ email: mockEmail, otp: mockOtp }) ).rejects.toThrow(BadRequestError) - expect(mockUsersService.verifyOtp).toHaveBeenCalledWith( + expect(mockUsersService.verifyEmailOtp).toHaveBeenCalledWith( mockEmail, mockOtp ) diff --git a/src/services/utilServices/__tests__/RateLimiter.spec.ts b/src/services/utilServices/__tests__/RateLimiter.spec.ts new file mode 100644 index 000000000..a301e2e49 --- /dev/null +++ b/src/services/utilServices/__tests__/RateLimiter.spec.ts @@ -0,0 +1,31 @@ +import express from "express" +import rateLimit from "express-rate-limit" +import request from "supertest" + +describe("rate limiting", () => { + // NOTE: There is a need to initialise another rate limiter + // as the rate limit library uses an in-memory store for each instance. + // This means that the requests made in another test would also impact the rate limit. 
+ const mockRateLimiter = rateLimit({ + windowMs: 15 * 60 * 1000, + max: 1, + }) + const rateLimitedRouter = express() + rateLimitedRouter.use(mockRateLimiter) + rateLimitedRouter.get("/test", (req, res) => { + res.status(200).send() + }) + + it("should allow all the requests through when the number of requests made is below the limit of 1", async () => { + // Act + assert + await request(rateLimitedRouter).get("/test").expect(200) + }) + + it("should disallow the 101th request made within the 15 minute window", async () => { + // Act + const resp = await request(rateLimitedRouter).get(`/test`).expect(429) + + // Assert + expect(resp.text).toBe("Too many requests, please try again later.") + }) +}) diff --git a/src/tests/database.ts b/src/tests/database.ts index 222be294a..08bd04eeb 100644 --- a/src/tests/database.ts +++ b/src/tests/database.ts @@ -8,6 +8,7 @@ import { Whitelist, AccessToken, Repo, + Otp, Deployment, Launch, Redirection, @@ -30,6 +31,7 @@ sequelize.addModels([ Whitelist, AccessToken, Repo, + Otp, Deployment, Launch, Redirection, diff --git a/src/types/express/session.d.ts b/src/types/express/session.d.ts new file mode 100644 index 000000000..a77f0d2b8 --- /dev/null +++ b/src/types/express/session.d.ts @@ -0,0 +1,8 @@ +export interface SessionData { + userInfo: { + accessToken?: string + githubId?: string + isomerUserId: string + email: string + } +} diff --git a/src/utils/__tests__/markdown-utils.spec.ts b/src/utils/__tests__/markdown-utils.spec.ts new file mode 100644 index 000000000..c8ad8e7d7 --- /dev/null +++ b/src/utils/__tests__/markdown-utils.spec.ts @@ -0,0 +1,39 @@ +import { + retrieveDataFromMarkdown, + convertDataToMarkdown, +} from "@utils/markdown-utils" + +import { + normalMarkdownContent, + maliciousMarkdownContent, + normalJsonObject, + maliciousJsonObject, +} from "@fixtures/markdown-fixtures" + +describe("Sanitized markdown utils test", () => { + it("should parse normal markdown content into an object successfully", () => { + expect(retrieveDataFromMarkdown(normalMarkdownContent)).toStrictEqual( + normalJsonObject + ) + }) + + it("should parse malicious markdown content into a sanitized object successfully", () => { + expect(retrieveDataFromMarkdown(maliciousMarkdownContent)).toStrictEqual( + normalJsonObject + ) + }) + + it("should stringify a normal JSON object into markdown content successfully", () => { + const { frontMatter, pageContent } = normalJsonObject + expect(convertDataToMarkdown(frontMatter, pageContent)).toBe( + normalMarkdownContent + ) + }) + + it("should stringify a malicious JSON object into sanitized markdown content successfully", () => { + const { frontMatter, pageContent } = maliciousJsonObject + expect(convertDataToMarkdown(frontMatter, pageContent)).toBe( + normalMarkdownContent + ) + }) +}) diff --git a/src/utils/__tests__/yaml-utils.spec.ts b/src/utils/__tests__/yaml-utils.spec.ts new file mode 100644 index 000000000..472099fcd --- /dev/null +++ b/src/utils/__tests__/yaml-utils.spec.ts @@ -0,0 +1,28 @@ +import { sanitizedYamlParse, sanitizedYamlStringify } from "@utils/yaml-utils" + +import { + normalYamlString, + maliciousYamlString, + normalYamlObject, + maliciousYamlObject, +} from "@fixtures/yaml-fixtures" + +describe("Sanitized yaml utils test", () => { + it("should parse a normal string into yaml content successfully", () => { + expect(sanitizedYamlParse(normalYamlString)).toStrictEqual(normalYamlObject) + }) + + it("should parse a malicious string into sanitized yaml content successfully", () => { + 
expect(sanitizedYamlParse(maliciousYamlString)).toStrictEqual( + normalYamlObject + ) + }) + + it("should stringify normal yaml content into a string successfully", () => { + expect(sanitizedYamlStringify(normalYamlObject)).toBe(normalYamlString) + }) + + it("should stringify malicious yaml content into a string successfully", () => { + expect(sanitizedYamlStringify(maliciousYamlObject)).toBe(normalYamlString) + }) +}) diff --git a/src/utils/file-upload-utils.js b/src/utils/file-upload-utils.js index dbee488b2..3af790b8a 100644 --- a/src/utils/file-upload-utils.js +++ b/src/utils/file-upload-utils.js @@ -1,10 +1,12 @@ -const createDOMPurify = require("dompurify") +import logger from "@logger/logger" + +const { CLOUDMERSIVE_API_KEY } = process.env +const CloudmersiveVirusApiClient = require("cloudmersive-virus-api-client") const FileType = require("file-type") const isSvg = require("is-svg") -const { JSDOM } = require("jsdom") +const DOMPurify = require("isomorphic-dompurify") -const { window } = new JSDOM("") -const DOMPurify = createDOMPurify(window) +const { BaseIsomerError } = require("@errors/BaseError") const ALLOWED_FILE_EXTENSIONS = [ "pdf", @@ -15,9 +17,37 @@ const ALLOWED_FILE_EXTENSIONS = [ "bmp", "ico", ] +const defaultCloudmersiveClient = CloudmersiveVirusApiClient.ApiClient.instance + +// Configure API key authorization: Apikey +const apikey = defaultCloudmersiveClient.authentications.Apikey +apikey.apiKey = CLOUDMERSIVE_API_KEY + +const apiInstance = new CloudmersiveVirusApiClient.ScanApi() + +const scanFileForVirus = (fileBuffer) => + new Promise((success, failure) => { + // check if the api key is missing in the env + if (!CLOUDMERSIVE_API_KEY) { + logger.error("Cloudmersive API Key is missing in env") + throw new BaseIsomerError(500, "Internal Server Error") + } + + apiInstance.scanFile(fileBuffer, (error, data) => { + if (error) { + logger.error( + `Error when calling Cloudmersive Virus Scan API: ${error.message}` + ) + failure(error) + } else { + logger.info("Cloudmersive Virus Scan API called successfully") + success(data) + } + }) + }) const validateAndSanitizeFileUpload = async (data) => { - const [schema, content] = data.split(",") + const [, content] = data.split(",") const fileBuffer = Buffer.from(content, "base64") const detectedFileType = await FileType.fromBuffer(fileBuffer) @@ -35,4 +65,4 @@ const validateAndSanitizeFileUpload = async (data) => { return undefined } -module.exports = { validateAndSanitizeFileUpload, ALLOWED_FILE_EXTENSIONS } +module.exports = { validateAndSanitizeFileUpload, scanFileForVirus, ALLOWED_FILE_EXTENSIONS } diff --git a/src/utils/markdown-utils.js b/src/utils/markdown-utils.js index 5f1e8cdae..1b1d317d2 100644 --- a/src/utils/markdown-utils.js +++ b/src/utils/markdown-utils.js @@ -1,14 +1,21 @@ +const DOMPurify = require("isomorphic-dompurify") const _ = require("lodash") -const yaml = require("yaml") + +const { + sanitizedYamlParse, + sanitizedYamlStringify, +} = require("@utils/yaml-utils") const getTrailingSlashWithPermalink = (permalink) => permalink.endsWith("/") ? 
permalink : `${permalink}/`
 
 const retrieveDataFromMarkdown = (fileContent) => {
   // eslint-disable-next-line no-unused-vars
-  const [unused, encodedFrontMatter, ...pageContent] = fileContent.split("---")
-  const frontMatter = yaml.parse(encodedFrontMatter)
-  return { frontMatter, pageContent: pageContent.join("---") }
+  const [unused, encodedFrontMatter, ...pageContent] = DOMPurify.sanitize(
+    fileContent
+  ).split("---")
+  const frontMatter = sanitizedYamlParse(encodedFrontMatter)
+  return { frontMatter, pageContent: pageContent.join("---").trim() }
 }
 
 const isResourceFileOrLink = (frontMatter) => {
@@ -25,9 +32,10 @@ const convertDataToMarkdown = (originalFrontMatter, pageContent) => {
   if (permalink) {
     frontMatter.permalink = getTrailingSlashWithPermalink(permalink)
   }
-  const newFrontMatter = yaml.stringify(frontMatter)
+  const newFrontMatter = sanitizedYamlStringify(frontMatter)
   const newContent = ["---\n", newFrontMatter, "---\n", pageContent].join("")
-  return newContent
+
+  return DOMPurify.sanitize(newContent)
 }
 
 module.exports = {
diff --git a/src/utils/time-utils.ts b/src/utils/time-utils.ts
new file mode 100644
index 000000000..8a0ab7a8a
--- /dev/null
+++ b/src/utils/time-utils.ts
@@ -0,0 +1,2 @@
+export const milliSecondsToMinutes = (millis: number) =>
+  Math.floor(millis / 60000)
diff --git a/src/utils/yaml-utils.ts b/src/utils/yaml-utils.ts
new file mode 100644
index 000000000..55f2da3ba
--- /dev/null
+++ b/src/utils/yaml-utils.ts
@@ -0,0 +1,11 @@
+import DOMPurify from "isomorphic-dompurify"
+import yaml from "yaml"
+
+// Note: `yaml.parse()` and `yaml.stringify()` should not be used anywhere
+// else in the codebase.
+export const sanitizedYamlParse = (
+  unparsedContent: string
+): Record<string, unknown> => yaml.parse(DOMPurify.sanitize(unparsedContent))
+
+export const sanitizedYamlStringify = (prestringifiedContent: object): string =>
+  DOMPurify.sanitize(yaml.stringify(prestringifiedContent))
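
For readers following the new login flow: `OtpService` generates a six-digit code with `crypto.randomInt`, stores only its bcrypt hash, and `UsersService` enforces an expiry window and a maximum attempt count against the stored `Otp` row, destroying the row once the OTP has been used. A minimal sketch of how these pieces are intended to compose, using a hypothetical in-memory `Map` in place of the Sequelize `Otp` model (the store, the helper names, and the 5-attempt / 15-minute defaults are illustrative assumptions, not code from this diff):

```ts
import crypto from "crypto"
import bcrypt from "bcrypt"

const SALT_TIMES = 10
const OTP_LENGTH = 6
const MAX_ATTEMPTS = 5 // assumed to mirror MAX_NUM_OTP_ATTEMPTS
const EXPIRY_MS = 15 * 60 * 1000 // assumed to mirror OTP_EXPIRY

interface OtpRecord {
  hashedOtp: string
  attempts: number
  expiresAt: Date
}

// Hypothetical in-memory stand-in for the Otp table.
const otpStore = new Map<string, OtpRecord>()

const generateOtp = (): string =>
  Array(OTP_LENGTH)
    .fill(0)
    .map(() => crypto.randomInt(0, 10)) // cryptographically strong digits
    .join("")

// Issue an OTP: persist only the bcrypt hash, never the plaintext.
export const issueOtp = async (email: string): Promise<string> => {
  const otp = generateOtp()
  const hashedOtp = await bcrypt.hash(otp, SALT_TIMES)
  otpStore.set(email, {
    hashedOtp,
    attempts: 0,
    expiresAt: new Date(Date.now() + EXPIRY_MS),
  })
  return otp // delivered to the user over email or SMS
}

// Verify an OTP: count the attempt, compare the hash, check expiry,
// and discard the record once it has been used.
export const verifyOtp = async (email: string, otp: string): Promise<boolean> => {
  const entry = otpStore.get(email)
  if (!otp || !entry) return false
  if (entry.attempts >= MAX_ATTEMPTS) return false

  entry.attempts += 1
  const isValid = await bcrypt.compare(otp, entry.hashedOtp)
  if (!isValid) return false

  if (entry.expiresAt < new Date()) {
    otpStore.delete(email)
    return false
  }

  otpStore.delete(email) // single use
  return true
}
```

The diff's `UsersService.verifyOtp` throws a `BadRequestError` at each of these branches instead of returning `false`; the sketch flattens that to booleans to stay self-contained.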
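On the expiry and attempt-count parsing: `parseInt` returns `NaN` (never `null` or `undefined`) when the environment variable is missing or non-numeric, so a `??` fallback placed after it will not fire. A small guard that makes the fallback explicit; `parseEnvInt` and the default values below are hypothetical, for illustration only:

```ts
// parseInt yields NaN for a missing or non-numeric value, and NaN is
// neither null nor undefined, so `??` cannot supply a default here.
const parseEnvInt = (value: string | undefined, fallback: number): number => {
  const parsed = parseInt(value ?? "", 10)
  return Number.isNaN(parsed) ? fallback : parsed
}

const OTP_EXPIRY_MS = parseEnvInt(process.env.OTP_EXPIRY, 15 * 60 * 1000)
const MAX_NUM_OTP_ATTEMPTS = parseEnvInt(process.env.MAX_NUM_OTP_ATTEMPTS, 5)
```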
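For the rate limiter, note that express-rate-limit interprets `windowMs` in milliseconds, so a 15-minute window is written as `15 * 60 * 1000`. A minimal configuration sketch; only the limit of 100 requests per window is taken from the diff, the rest is illustrative:

```ts
import express from "express"
import rateLimit from "express-rate-limit"

// windowMs is in milliseconds: a 15-minute window is 15 * 60 * 1000.
const limiter = rateLimit({
  windowMs: 15 * 60 * 1000,
  max: 100, // limit each IP to 100 requests per window
  standardHeaders: true, // return rate limit info in the RateLimit-* headers
  legacyHeaders: false, // disable the X-RateLimit-* headers
})

const app = express()
app.use(limiter) // can also be mounted on a specific router or route
```

As the comment in RateLimiter.ts notes, the store is in memory, so with two production instances the effective limit applies per instance rather than globally.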
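The sanitisation utilities route both parsing and stringifying through DOMPurify so that embedded markup such as `<script>` elements never reaches the YAML parser or the written file. A self-contained usage sketch; the helper is reproduced from the diff and the sample front matter is made up:

```ts
import DOMPurify from "isomorphic-dompurify"
import yaml from "yaml"

const sanitizedYamlParse = (unparsedContent: string): Record<string, unknown> =>
  yaml.parse(DOMPurify.sanitize(unparsedContent))

// Hypothetical malicious front matter: DOMPurify strips the <script>
// element (and its contents) before the YAML parser sees the string.
const malicious = "title: Hello<script>alert(1)</script>\npermalink: /hello/\n"

console.log(sanitizedYamlParse(malicious))
// => { title: 'Hello', permalink: '/hello/' }
```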