diff --git a/Dockerfile b/Dockerfile index ecf7a60..934e486 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,4 +7,4 @@ COPY package*.json ./ COPY pickup/package.json ./pickup/package.json RUN npm ci -w pickup --no-audit COPY . . -CMD [ "npm", "start", "-w", "pickup" ] \ No newline at end of file +CMD [ "npm", "start", "-w", "pickup", "--silent"] \ No newline at end of file diff --git a/README.md b/README.md index 07bd6bd..a706506 100644 --- a/README.md +++ b/README.md @@ -74,6 +74,41 @@ Find the status of a pin } ``` +## Environment + +Set the following in the pickup worker env to tune its behavior. + +### `MAX_CAR_BYTES` + +Maximum size in bytes of a CAR that pickup will fetch. Caps the amount of data we will pull in a single job. + +**default: 31 GiB** _(33,285,996,544 bytes)_ + +### `FETCH_TIMEOUT_MS` + +How long to wait for a CAR fetch to complete before failing the job. Caps the amount of time we spend on a job. + +**default: 4 hrs** + +_2/3rds of home internet users can upload faster than 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs._ + +see: https://www.speedtest.net/global-index +see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit + +### `FETCH_CHUNK_TIMEOUT_MS` + +How long to wait between chunks of data before failing a CAR fetch. Limits the amount of time we spend waiting on a stalled fetch. + +**default: 2 min** + +### `BATCH_SIZE` + +How many pin requests to handle concurrently per worker. + +Used to set both the concurrency per worker *and* the max number of messages each worker fetches from the queue in a single batch. + +**default: 10** + ## Getting Started PR's are deployed automatically to `https://.pickup.dag.haus`. The `main` branch is deployed to https://staging.pickup.dag.haus and staging builds are promoted to prod manually via the UI at https://console.seed.run/dag-house/pickup diff --git a/package-lock.json b/package-lock.json index 1f52b80..1dd2639 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6568,6 +6568,15 @@ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "node_modules/@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -6628,6 +6637,19 @@ "node": ">=10.10.0" } }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, "node_modules/@humanwhocodes/object-schema": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", @@ -8133,6 +8155,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, "dependencies": { "retry": "0.13.1"
} @@ -8941,6 +8964,15 @@ "node": ">=10.0.0" } }, + "node_modules/builtins": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", + "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + "dev": true, + "dependencies": { + "semver": "^7.0.0" + } + }, "node_modules/busboy": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", @@ -10885,6 +10917,74 @@ "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "dev": true }, + "node_modules/eslint-plugin-n": { + "version": "15.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", + "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", + "dev": true, + "dependencies": { + "builtins": "^5.0.1", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.11.0", + "minimatch": "^3.1.2", + "resolve": "^1.22.1", + "semver": "^7.3.8" + }, + "engines": { + "node": ">=12.22.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=7.0.0" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "dev": true, + "dependencies": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=4.19.1" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-plugin-es/node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint-plugin-n/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/eslint-plugin-node": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -11248,9 +11348,9 @@ } }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -11891,9 +11991,9 @@ } }, "node_modules/globals": { - "version": "13.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.18.0.tgz", - "integrity": 
"sha512-/mR4KI8Ps2spmoc0Ulu9L7agOF0du1CZNQ3dke8yItYlyKNmGrkONemBbd6V8UTc1Wgcqn21t3WYB7dbRmh6/A==", + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, "dependencies": { "type-fest": "^0.20.2" @@ -11942,6 +12042,12 @@ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", "dev": true }, + "node_modules/grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, "node_modules/graphql": { "version": "16.6.0", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz", @@ -13219,6 +13325,16 @@ "url": "https://github.com/sponsors/panva" } }, + "node_modules/js-sdsl": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", + "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/js-sdsl" + } + }, "node_modules/js-string-escape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", @@ -13486,6 +13602,11 @@ "varint": "^6.0.0" } }, + "node_modules/linked-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/linked-list/-/linked-list-2.1.0.tgz", + "integrity": "sha512-0GK/ylO6e5cv1PCOIdTRHxOaCgQ+0jKwHt+cHzkiCAZlx0KM5Id1bBAPad6g2mkvBNp1pNdmG0cohFGfqjkv9A==" + }, "node_modules/load-json-file": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-7.0.1.tgz", @@ -16257,6 +16378,66 @@ "aws-sdk": "^2.1114.0" } }, + "node_modules/squiss-ts": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/squiss-ts/-/squiss-ts-4.4.1.tgz", + "integrity": "sha512-mLZ8ppMJUr7Zn2LS0extvY+E6oDMdZKgZy10epTIGau9qWvwihCnXwb1LifgT6qNvdH7jqxAB/7mgRAV8DEkIA==", + "hasInstallScript": true, + "dependencies": { + "aws-sdk": "2.814.0", + "linked-list": "^2.1.0", + "semver": "6.3.0", + "ts-type-guards": "^0.7.0", + "uuid": "^8.3.2" + }, + "engines": { + "node": "^6 || ^8 || ^10 || ^12 || ^14 || ^16 || ^18" + } + }, + "node_modules/squiss-ts/node_modules/aws-sdk": { + "version": "2.814.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.814.0.tgz", + "integrity": "sha512-empd1m/J/MAkL6d9OeRpmg9thobULu0wk4v8W3JToaxGi2TD7PIdvE6yliZKyOVAdJINhBWEBhxR4OUIHhcGbQ==", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/squiss-ts/node_modules/aws-sdk/node_modules/uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. 
See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/squiss-ts/node_modules/jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/squiss-ts/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/ssh-remote-port-forward": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz", @@ -17691,6 +17872,11 @@ "node": ">=6" } }, + "node_modules/ts-type-guards": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/ts-type-guards/-/ts-type-guards-0.7.0.tgz", + "integrity": "sha512-f+IUliQzU09E8Ml5JwL+sxWenrebphNhy0LaCmkwACOvliuB7LxuAXUrAVfjwF+2PX5yMs9hfydBPR5lIIuUCA==" + }, "node_modules/tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", @@ -18469,146 +18655,641 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", - "sqs-consumer": "^5.7.0" + "squiss-ts": "^4.4.1" }, "devDependencies": { "@aws-sdk/client-sqs": "^3.118.1", "@web-std/blob": "^3.0.4", "ava": "^4.3.0", "ipfs-car": "^0.7.0", - "standard": "^16.0.4", + "standard": "^17.0.0", "testcontainers": "^8.10.1" }, "engines": { "node": ">=16.14" } }, - "validator": { - "version": "1.0.1", - "license": "MIT", + "pickup/node_modules/@eslint/eslintrc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", + "dev": true, "dependencies": { - "@aws-sdk/client-s3": "3.258.0", - "@aws-sdk/lib-dynamodb": "^3.112.0", - "@aws-sdk/lib-storage": "^3.97.0", - "@ipld/car": "5.1.0", - "async-retry": "1.3.3", - "debounce": "^1.2.1", - "linkdex": "2.0.0", - "multiaddr": "10.0.1", - "multiformats": "11.0.1", - "node-fetch": "^3.2.10", - "pino": "8.8.0", - "sqs-consumer": "^5.7.0" - }, - "devDependencies": { - "@aws-sdk/client-sqs": "^3.118.1", - "@web-std/blob": "^3.0.4", - "ava": "^4.3.0", - "ipfs-car": "^0.7.0", - "standard": "^16.0.4", - "testcontainers": "^8.10.1" + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" }, "engines": { - "node": ">=16.14" + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" } }, - "validator/node_modules/@ipld/car": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.1.0.tgz", - "integrity": "sha512-k9pO0YqJvmFGY5pJDhF2Ocz+mRp3C3r4ikr1NrUXkzN/z4JzhE7XbQzUCcm7daq8k4tRqap0fWPjxZwjS9PUcQ==", + "pickup/node_modules/@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": 
"https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, "dependencies": { - "@ipld/dag-cbor": "^9.0.0", - "cborg": "^1.9.0", - "multiformats": "^11.0.0", - "varint": "^6.0.0" + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" }, "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "node": ">=10.10.0" } }, - "validator/node_modules/@ipld/dag-cbor": { - "version": "9.0.0", - "resolved": "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.0.0.tgz", - "integrity": "sha512-zdsiSiYDEOIDW7mmWOYWC9gukjXO+F8wqxz/LfN7iSwTfIyipC8+UQrCbPupFMRb/33XQTZk8yl3My8vUQBRoA==", + "pickup/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, "dependencies": { - "cborg": "^1.10.0", - "multiformats": "^11.0.0" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" }, - "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, - "validator/node_modules/multiformats": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-11.0.1.tgz", - "integrity": "sha512-atWruyH34YiknSdL5yeIir00EDlJRpHzELYQxG7Iy29eCyL+VrZHpPrX5yqlik3jnuqpLpRKVZ0SGVb9UzKaSA==", + "pickup/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "engines": { - "node": ">=16.0.0", - "npm": ">=7.0.0" - } - } - }, - "dependencies": { - "@apidevtools/json-schema-ref-parser": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.0.tgz", - "integrity": "sha512-teB30tFooE3iQs2HQIKJ02D8UZA1Xy1zaczzhUjJs0CymYxeC0g+y5rCY2p8NHBM6DBUVoR8rSM4kHLj1WE9mQ==", - "requires": { - "@jsdevtools/ono": "^7.1.3", - "@types/json-schema": "^7.0.6", - "call-me-maybe": "^1.0.1", - "js-yaml": "^4.1.0" + "node": ">=8" } }, - "@assemblyscript/loader": { - "version": "0.9.4", - "resolved": "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.9.4.tgz", - "integrity": "sha512-HazVq9zwTVwGmqdwYzu7WyQ6FQVZ7SwET0KKQuKm55jD0IfUpZgN0OPIiZG3zV1iSrVYcN0bdwLRXI/VNCYsUA==", - "dev": true - }, - "@aws-cdk/aws-apigatewayv2-alpha": { - "version": "2.50.0-alpha.0", - "resolved": "https://registry.npmjs.org/@aws-cdk/aws-apigatewayv2-alpha/-/aws-apigatewayv2-alpha-2.50.0-alpha.0.tgz", - "integrity": "sha512-dttWDqy+nTg/fD9y0egvj7/zdnOVEo0qyGsep1RV+p16R3F4ObMKyPVIg15fz57tK//Gp/i1QgXsZaSqbcWHOg==", - "requires": {} - }, - "@aws-crypto/crc32": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-3.0.0.tgz", - "integrity": "sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==", - "requires": { - "@aws-crypto/util": "^3.0.0", - "@aws-sdk/types": "^3.222.0", - "tslib": "^1.11.1" - }, + "pickup/node_modules/eslint": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": 
"sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", + "dev": true, "dependencies": { - "@aws-crypto/util": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz", - "integrity": "sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==", - "requires": { - "@aws-sdk/types": "^3.222.0", - "@aws-sdk/util-utf8-browser": "^3.0.0", - "tslib": "^1.11.1" - } - }, - "@aws-sdk/types": { - "version": "3.257.0", - "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.257.0.tgz", - "integrity": "sha512-LmqXuBQBGeaGi/3Rp7XiEX1B5IPO2UUfBVvu0wwGqVsmstT0SbOVDZGPmxygACbm64n+PRx3uTSDefRfoiWYZg==", - "requires": { - "tslib": "^2.3.1" - }, - "dependencies": { - "tslib": { + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "pickup/node_modules/eslint-config-standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", + "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.0.1", + "eslint-plugin-import": "^2.25.2", + "eslint-plugin-n": "^15.0.0", + "eslint-plugin-promise": "^6.0.0" + } + }, + "pickup/node_modules/eslint-config-standard-jsx": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peerDependencies": { + "eslint": "^8.8.0", + "eslint-plugin-react": "^7.28.0" + } + }, + "pickup/node_modules/eslint-plugin-promise": { + "version": "6.1.1", + "resolved": 
"https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", + "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^7.0.0 || ^8.0.0" + } + }, + "pickup/node_modules/eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/espree": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", + "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "dev": true, + "dependencies": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "pickup/node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "pickup/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "pickup/node_modules/load-json-file": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", + "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.15", + "parse-json": "^4.0.0", + "pify": "^4.0.1", + "strip-bom": "^3.0.0", + "type-fest": "^0.3.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + 
"deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "pickup/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/pkg-conf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", + "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", + "dev": true, + "dependencies": { + "find-up": "^3.0.0", + "load-json-file": "^5.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "dependencies": { + "locate-path": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "dependencies": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "pickup/node_modules/pkg-conf/node_modules/p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "dependencies": { + "p-limit": "^2.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/pkg-conf/node_modules/path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "pickup/node_modules/standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.0.0.tgz", + "integrity": 
"sha512-GlCM9nzbLUkr+TYR5I2WQoIah4wHA2lMauqbyPLV/oI5gJxqhHzhjl9EG2N0lr/nRqI3KCbCvm/W3smxvLaChA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "eslint": "^8.13.0", + "eslint-config-standard": "17.0.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-n": "^15.1.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-react": "^7.28.0", + "standard-engine": "^15.0.0" + }, + "bin": { + "standard": "bin/cmd.js" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/standard-engine": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.0.0.tgz", + "integrity": "sha512-4xwUhJNo1g/L2cleysUqUv7/btn7GEbYJvmgKrQ2vd/8pkTmN8cpqAZg+BT8Z1hNeEH787iWUdOpL8fmApLtxA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "get-stdin": "^8.0.0", + "minimist": "^1.2.6", + "pkg-conf": "^3.1.0", + "xdg-basedir": "^4.0.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, + "pickup/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "pickup/node_modules/type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "pickup/node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "validator": { + "version": "1.0.1", + "license": "MIT", + "dependencies": { + "@aws-sdk/client-s3": "3.258.0", + "@aws-sdk/lib-dynamodb": "^3.112.0", + "@aws-sdk/lib-storage": "^3.97.0", + "@ipld/car": "5.1.0", + "debounce": "^1.2.1", + "linkdex": "2.0.0", + "multiaddr": "10.0.1", + "multiformats": "11.0.1", + "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", + "pino": "8.8.0", + "sqs-consumer": "^5.7.0" + }, + "devDependencies": { + "@aws-sdk/client-sqs": "^3.118.1", + "@web-std/blob": "^3.0.4", + "ava": "^4.3.0", + "ipfs-car": "^0.7.0", + "standard": "^16.0.4", + "testcontainers": "^8.10.1" + }, + "engines": { + "node": ">=16.14" + } + }, + "validator/node_modules/@ipld/car": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/@ipld/car/-/car-5.1.0.tgz", + "integrity": "sha512-k9pO0YqJvmFGY5pJDhF2Ocz+mRp3C3r4ikr1NrUXkzN/z4JzhE7XbQzUCcm7daq8k4tRqap0fWPjxZwjS9PUcQ==", + "dependencies": { + "@ipld/dag-cbor": "^9.0.0", + "cborg": "^1.9.0", + "multiformats": "^11.0.0", + "varint": "^6.0.0" + }, + 
"engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "validator/node_modules/@ipld/dag-cbor": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.0.0.tgz", + "integrity": "sha512-zdsiSiYDEOIDW7mmWOYWC9gukjXO+F8wqxz/LfN7iSwTfIyipC8+UQrCbPupFMRb/33XQTZk8yl3My8vUQBRoA==", + "dependencies": { + "cborg": "^1.10.0", + "multiformats": "^11.0.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "validator/node_modules/multiformats": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-11.0.1.tgz", + "integrity": "sha512-atWruyH34YiknSdL5yeIir00EDlJRpHzELYQxG7Iy29eCyL+VrZHpPrX5yqlik3jnuqpLpRKVZ0SGVb9UzKaSA==", + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + } + }, + "dependencies": { + "@apidevtools/json-schema-ref-parser": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.1.0.tgz", + "integrity": "sha512-teB30tFooE3iQs2HQIKJ02D8UZA1Xy1zaczzhUjJs0CymYxeC0g+y5rCY2p8NHBM6DBUVoR8rSM4kHLj1WE9mQ==", + "requires": { + "@jsdevtools/ono": "^7.1.3", + "@types/json-schema": "^7.0.6", + "call-me-maybe": "^1.0.1", + "js-yaml": "^4.1.0" + } + }, + "@assemblyscript/loader": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/@assemblyscript/loader/-/loader-0.9.4.tgz", + "integrity": "sha512-HazVq9zwTVwGmqdwYzu7WyQ6FQVZ7SwET0KKQuKm55jD0IfUpZgN0OPIiZG3zV1iSrVYcN0bdwLRXI/VNCYsUA==", + "dev": true + }, + "@aws-cdk/aws-apigatewayv2-alpha": { + "version": "2.50.0-alpha.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-apigatewayv2-alpha/-/aws-apigatewayv2-alpha-2.50.0-alpha.0.tgz", + "integrity": "sha512-dttWDqy+nTg/fD9y0egvj7/zdnOVEo0qyGsep1RV+p16R3F4ObMKyPVIg15fz57tK//Gp/i1QgXsZaSqbcWHOg==", + "requires": {} + }, + "@aws-crypto/crc32": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/crc32/-/crc32-3.0.0.tgz", + "integrity": "sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==", + "requires": { + "@aws-crypto/util": "^3.0.0", + "@aws-sdk/types": "^3.222.0", + "tslib": "^1.11.1" + }, + "dependencies": { + "@aws-crypto/util": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@aws-crypto/util/-/util-3.0.0.tgz", + "integrity": "sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==", + "requires": { + "@aws-sdk/types": "^3.222.0", + "@aws-sdk/util-utf8-browser": "^3.0.0", + "tslib": "^1.11.1" + } + }, + "@aws-sdk/types": { + "version": "3.257.0", + "resolved": "https://registry.npmjs.org/@aws-sdk/types/-/types-3.257.0.tgz", + "integrity": "sha512-LmqXuBQBGeaGi/3Rp7XiEX1B5IPO2UUfBVvu0wwGqVsmstT0SbOVDZGPmxygACbm64n+PRx3uTSDefRfoiWYZg==", + "requires": { + "tslib": "^2.3.1" + }, + "dependencies": { + "tslib": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.5.0.tgz", "integrity": "sha512-336iVw3rtn2BUK7ORdIAHTyxHGRIHVReokCR3XjbckJMK7ms8FysBfhLR8IXnAgy7T0PTPNBWKiH514FOW/WSg==" @@ -23897,6 +24578,12 @@ } } }, + "@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true + }, "@gar/promisify": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", @@ -23945,6 +24632,12 @@ "minimatch": "^3.0.4" } }, + 
"@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true + }, "@humanwhocodes/object-schema": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", @@ -25120,6 +25813,7 @@ "version": "1.3.3", "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz", "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==", + "dev": true, "requires": { "retry": "0.13.1" } @@ -25724,6 +26418,15 @@ "dev": true, "optional": true }, + "builtins": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/builtins/-/builtins-5.0.1.tgz", + "integrity": "sha512-qwVpFEHNfhYJIzNRBvd2C1kyo6jz3ZSMPyyuR47OPdiKWlbYnZNyDWuyR175qDnAJLiCo5fBBqPb3RiXgWlkOQ==", + "dev": true, + "requires": { + "semver": "^7.0.0" + } + }, "busboy": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz", @@ -27184,6 +27887,51 @@ } } }, + "eslint-plugin-n": { + "version": "15.6.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-n/-/eslint-plugin-n-15.6.1.tgz", + "integrity": "sha512-R9xw9OtCRxxaxaszTQmQAlPgM+RdGjaL1akWuY/Fv9fRAi8Wj4CUKc6iYVG8QNRjRuo8/BqVYIpfqberJUEacA==", + "dev": true, + "requires": { + "builtins": "^5.0.1", + "eslint-plugin-es": "^4.1.0", + "eslint-utils": "^3.0.0", + "ignore": "^5.1.1", + "is-core-module": "^2.11.0", + "minimatch": "^3.1.2", + "resolve": "^1.22.1", + "semver": "^7.3.8" + }, + "dependencies": { + "eslint-plugin-es": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-4.1.0.tgz", + "integrity": "sha512-GILhQTnjYE2WorX5Jyi5i4dz5ALWxBIdQECVQavL6s7cI76IZTDWleTHkxz/QT3kvcs2QlGHvKLYsSlPOlPXnQ==", + "dev": true, + "requires": { + "eslint-utils": "^2.0.0", + "regexpp": "^3.0.0" + }, + "dependencies": { + "eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + } + } + }, + "eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true + } + } + }, "eslint-plugin-node": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz", @@ -27352,9 +28100,9 @@ "dev": true }, "esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "requires": { "estraverse": "^5.1.0" @@ -27847,9 +28595,9 @@ } }, "globals": { - "version": "13.18.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.18.0.tgz", - "integrity": 
"sha512-/mR4KI8Ps2spmoc0Ulu9L7agOF0du1CZNQ3dke8yItYlyKNmGrkONemBbd6V8UTc1Wgcqn21t3WYB7dbRmh6/A==", + "version": "13.20.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.20.0.tgz", + "integrity": "sha512-Qg5QtVkCy/kv3FUSlu4ukeZDVf9ee0iXLAUYX13gbR17bnejFTzr4iS9bY7kwCf1NztRNm1t91fjOiyx4CSwPQ==", "dev": true, "requires": { "type-fest": "^0.20.2" @@ -27883,6 +28631,12 @@ "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", "dev": true }, + "grapheme-splitter": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz", + "integrity": "sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==", + "dev": true + }, "graphql": { "version": "16.6.0", "resolved": "https://registry.npmjs.org/graphql/-/graphql-16.6.0.tgz", @@ -28822,6 +29576,12 @@ "integrity": "sha512-YRv4Tk/Wlug8qicwqFNFVEZSdbROCHRAC6qu/i0dyNKr5JQdoa2pIGoS04lLO/jXQX7Z9omoNewYIVIxqZBd9Q==", "dev": true }, + "js-sdsl": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/js-sdsl/-/js-sdsl-4.3.0.tgz", + "integrity": "sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ==", + "dev": true + }, "js-string-escape": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/js-string-escape/-/js-string-escape-1.0.1.tgz", @@ -29037,6 +29797,11 @@ } } }, + "linked-list": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/linked-list/-/linked-list-2.1.0.tgz", + "integrity": "sha512-0GK/ylO6e5cv1PCOIdTRHxOaCgQ+0jKwHt+cHzkiCAZlx0KM5Id1bBAPad6g2mkvBNp1pNdmG0cohFGfqjkv9A==" + }, "load-json-file": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-7.0.1.tgz", @@ -30066,17 +30831,338 @@ "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", "@web-std/blob": "^3.0.4", - "async-retry": "1.3.3", "ava": "^4.3.0", "debounce": "^1.2.1", "ipfs-car": "^0.7.0", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", - "sqs-consumer": "^5.7.0", - "standard": "^16.0.4", + "squiss-ts": "^4.4.1", + "standard": "^17.0.0", "testcontainers": "^8.10.1" + }, + "dependencies": { + "@eslint/eslintrc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", + "dev": true, + "requires": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^9.4.0", + "globals": "^13.19.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + } + }, + "@humanwhocodes/config-array": { + "version": "0.11.8", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", + "integrity": "sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g==", + "dev": true, + "requires": { + "@humanwhocodes/object-schema": "^1.2.1", + "debug": "^4.1.1", + "minimatch": "^3.0.5" + } + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + 
"uri-js": "^4.2.2" + } + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "eslint": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", + "dev": true, + "requires": { + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", + "@humanwhocodes/config-array": "^0.11.8", + "@humanwhocodes/module-importer": "^1.0.1", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "doctrine": "^3.0.0", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^7.1.1", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.3.0", + "espree": "^9.4.0", + "esquery": "^1.4.2", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "globals": "^13.19.0", + "grapheme-splitter": "^1.0.4", + "ignore": "^5.2.0", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "js-sdsl": "^4.1.4", + "js-yaml": "^4.1.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "regexpp": "^3.2.0", + "strip-ansi": "^6.0.1", + "strip-json-comments": "^3.1.0", + "text-table": "^0.2.0" + } + }, + "eslint-config-standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard/-/eslint-config-standard-17.0.0.tgz", + "integrity": "sha512-/2ks1GKyqSOkH7JFvXJicu0iMpoojkwB+f5Du/1SC0PtBL+s8v30k9njRZ21pm2drKYm2342jFnGWzttxPmZVg==", + "dev": true, + "requires": {} + }, + "eslint-config-standard-jsx": { + "version": "11.0.0", + "resolved": "https://registry.npmjs.org/eslint-config-standard-jsx/-/eslint-config-standard-jsx-11.0.0.tgz", + "integrity": "sha512-+1EV/R0JxEK1L0NGolAr8Iktm3Rgotx3BKwgaX+eAuSX8D952LULKtjgZD3F+e6SvibONnhLwoTi9DPxN5LvvQ==", + "dev": true, + "requires": {} + }, + "eslint-plugin-promise": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.1.1.tgz", + "integrity": "sha512-tjqWDwVZQo7UIPMeDReOpUgHCmCiH+ePnVT+5zVapL0uuHnegBUs2smM13CzOs2Xb5+MHMRFTs9v24yjba4Oig==", + "dev": true, + "requires": {} + }, + "eslint-scope": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.1.tgz", + "integrity": "sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw==", + "dev": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "espree": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.4.1.tgz", + "integrity": "sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg==", + "dev": true, + "requires": { + "acorn": "^8.8.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^3.3.0" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + 
"path-exists": "^4.0.0" + } + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" + } + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "load-json-file": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-5.3.0.tgz", + "integrity": "sha512-cJGP40Jc/VXUsp8/OrnyKyTZ1y6v/dphm3bioS+RrKXjK2BB6wHUd6JptZEFDGgGahMT+InnZO5i1Ei9mpC8Bw==", + "dev": true, + "requires": { + "graceful-fs": "^4.1.15", + "parse-json": "^4.0.0", + "pify": "^4.0.1", + "strip-bom": "^3.0.0", + "type-fest": "^0.3.0" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "requires": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + } + }, + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "pkg-conf": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/pkg-conf/-/pkg-conf-3.1.0.tgz", + "integrity": "sha512-m0OTbR/5VPNPqO1ph6Fqbj7Hv6QU7gR/tQW40ZqrL1rjgCU85W6C1bJn0BItuJqnR98PWzw7Z8hHeChD1WrgdQ==", + "dev": true, + "requires": { + "find-up": "^3.0.0", + "load-json-file": "^5.2.0" + }, + "dependencies": { + "find-up": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz", + "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==", + "dev": true, + "requires": { + "locate-path": "^3.0.0" + } + }, + "locate-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz", + "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==", + "dev": true, + "requires": { + "p-locate": "^3.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + 
"p-locate": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz", + "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==", + "dev": true, + "requires": { + "p-limit": "^2.0.0" + } + }, + "path-exists": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz", + "integrity": "sha512-bpC7GYwiDYQ4wYLe+FA8lhRjhQCMcQGuSgGGqDkg/QerRWw9CmGRT0iSOVRSZJ29NMLZgIzqaljJ63oaL4NIJQ==", + "dev": true + } + } + }, + "standard": { + "version": "17.0.0", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.0.0.tgz", + "integrity": "sha512-GlCM9nzbLUkr+TYR5I2WQoIah4wHA2lMauqbyPLV/oI5gJxqhHzhjl9EG2N0lr/nRqI3KCbCvm/W3smxvLaChA==", + "dev": true, + "requires": { + "eslint": "^8.13.0", + "eslint-config-standard": "17.0.0", + "eslint-config-standard-jsx": "^11.0.0", + "eslint-plugin-import": "^2.26.0", + "eslint-plugin-n": "^15.1.0", + "eslint-plugin-promise": "^6.0.0", + "eslint-plugin-react": "^7.28.0", + "standard-engine": "^15.0.0" + } + }, + "standard-engine": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/standard-engine/-/standard-engine-15.0.0.tgz", + "integrity": "sha512-4xwUhJNo1g/L2cleysUqUv7/btn7GEbYJvmgKrQ2vd/8pkTmN8cpqAZg+BT8Z1hNeEH787iWUdOpL8fmApLtxA==", + "dev": true, + "requires": { + "get-stdin": "^8.0.0", + "minimist": "^1.2.6", + "pkg-conf": "^3.1.0", + "xdg-basedir": "^4.0.0" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "type-fest": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.3.1.tgz", + "integrity": "sha512-cUGJnCdr4STbePCgqNFbpVNCepa+kAVohJs1sLhxzdH+gnEoOd8VhbYa7pD3zZYGiURWM2xzEII3fQcRizDkYQ==", + "dev": true + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } } }, "pickup-api": { @@ -30096,7 +31182,7 @@ "nanoid": "^4.0.0", "node-fetch": "^3.3.0", "openapi-backend": "^5.3.0", - "p-retry": "*", + "p-retry": "^5.1.2", "pino-lambda": "4.1.0" }, "dependencies": { @@ -31124,6 +32210,53 @@ "debug": "^4.3.1" } }, + "squiss-ts": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/squiss-ts/-/squiss-ts-4.4.1.tgz", + "integrity": "sha512-mLZ8ppMJUr7Zn2LS0extvY+E6oDMdZKgZy10epTIGau9qWvwihCnXwb1LifgT6qNvdH7jqxAB/7mgRAV8DEkIA==", + "requires": { + "aws-sdk": "2.814.0", + "linked-list": "^2.1.0", + "semver": "6.3.0", + "ts-type-guards": "^0.7.0", + "uuid": "^8.3.2" + }, + "dependencies": { + "aws-sdk": { + "version": "2.814.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.814.0.tgz", + "integrity": "sha512-empd1m/J/MAkL6d9OeRpmg9thobULu0wk4v8W3JToaxGi2TD7PIdvE6yliZKyOVAdJINhBWEBhxR4OUIHhcGbQ==", + "requires": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "dependencies": { + "uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": 
"sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" + } + } + }, + "jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w==" + }, + "semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==" + } + } + }, "ssh-remote-port-forward": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/ssh-remote-port-forward/-/ssh-remote-port-forward-1.0.4.tgz", @@ -32222,6 +33355,11 @@ } } }, + "ts-type-guards": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/ts-type-guards/-/ts-type-guards-0.7.0.tgz", + "integrity": "sha512-f+IUliQzU09E8Ml5JwL+sxWenrebphNhy0LaCmkwACOvliuB7LxuAXUrAVfjwF+2PX5yMs9hfydBPR5lIIuUCA==" + }, "tsconfig-paths": { "version": "3.14.1", "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.14.1.tgz", @@ -32434,7 +33572,6 @@ "@aws-sdk/lib-storage": "^3.97.0", "@ipld/car": "5.1.0", "@web-std/blob": "^3.0.4", - "async-retry": "1.3.3", "ava": "^4.3.0", "debounce": "^1.2.1", "ipfs-car": "^0.7.0", @@ -32442,6 +33579,7 @@ "multiaddr": "10.0.1", "multiformats": "11.0.1", "node-fetch": "^3.2.10", + "p-retry": "*", "pino": "8.8.0", "sqs-consumer": "^5.7.0", "standard": "^16.0.4", diff --git a/pickup/index.js b/pickup/index.js index 5a5ec86..bc4eabf 100644 --- a/pickup/index.js +++ b/pickup/index.js @@ -1,44 +1,16 @@ -import { createConsumer } from './lib/consumer.js' +import { createPickupFromEnv } from './lib/pickup.js' import { logger } from './lib/logger.js' -import { DownloadStatusManager } from './lib/downloadStatusManager.js' - -const { - IPFS_API_URL, - SQS_QUEUE_URL, - DYNAMO_TABLE_NAME, - DYNAMO_DB_ENDPOINT, - BATCH_SIZE, - MAX_RETRY, - TIMEOUT_FETCH, - LOG_STATE_EVERY_SECONDS, - VALIDATION_BUCKET -} = process.env - -if (!IPFS_API_URL) throw new Error('IPFS_API_URL not found in ENV') -if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV') -if (!DYNAMO_TABLE_NAME) throw new Error('DYNAMO_TABLE_NAME not found in ENV') async function start () { - logger.info({}, 'Pickup starting...') - const app = await createConsumer({ - ipfsApiUrl: IPFS_API_URL, - queueUrl: SQS_QUEUE_URL, - dynamoTable: DYNAMO_TABLE_NAME, - dynamoEndpoint: DYNAMO_DB_ENDPOINT || undefined, - validationBucket: VALIDATION_BUCKET || undefined, - batchSize: Number(BATCH_SIZE || 1), - maxRetry: Number(MAX_RETRY || 5), - timeoutFetchMs: Number(TIMEOUT_FETCH || 30) * 1000, - downloadStatusManager: new DownloadStatusManager(), - downloadStatusLoggerSeconds: Math.max(Number(LOG_STATE_EVERY_SECONDS) || 300, 60) - }) - - app.on('message_received', msg => { - const { requestid, cid } = JSON.parse(msg.Body) - logger.info({ requestid, cid }, 'Processing request') - }) - app.start() - logger.info({}, `Pickup subscribed to ${SQS_QUEUE_URL}`) + try { + const pickup = createPickupFromEnv() + logger.info('Pickup starting...') + await pickup.start() + logger.info('Pickup started') + } catch (err) { + logger.error(err, 'Pickup ded!') + throw err + } } start() diff --git a/pickup/lib/consumer.js b/pickup/lib/consumer.js deleted file mode 100644 index e16e1c4..0000000 --- a/pickup/lib/consumer.js +++ /dev/null @@ -1,153 +0,0 @@ -import retry from 'async-retry' -import { Consumer } from 'sqs-consumer' -import { 
DynamoDBClient } from '@aws-sdk/client-dynamodb' - -import { createS3Uploader } from './s3.js' -import { testIpfsApi } from './ipfs.js' -import { pickupBatch } from './pickupBatch.js' -import { logger } from './logger.js' - -/** - * Delete a message from ths SQS queue - * - * @param {import('@aws-sdk/client-sqs'.SQSClient)} opts.sqs - The SQS client - * @param string queueUrl - The Sqs Queue URL - * @param {import('@aws-sdk/client-sqs'.Message)} opts.message - The SQS message - * @returns {Promise} - */ -export async function deleteMessage ({ sqs, queueUrl }, message) { - const deleteParams = { - QueueUrl: queueUrl, - ReceiptHandle: message.ReceiptHandle - } - try { - await sqs - .deleteMessage(deleteParams) - .promise() - } catch (err) { - logger.error({ err }, 'SQS delete message failed') - throw err - } -} - -/** - * Create the consumer for the SQS queue. - * - * @param {string} ipfsApiUrl - The URL of the IPFS server. - * @param {string} queueUrl - The Sqs Queue URL. - * @param {import('@aws-sdk/client-s3'.S3Client)} s3 - The S3 client. - * @param {number} batchSize - The size of the concurrent batch. - * @param {number} maxRetry - The number of max retry before set the pin request to failed. - * @param {number} visibilityTimeout - The message visibility timeout in seconds, used internally by sqs-consumer. - * @param {number} heartbeatInterval - The message heartbeatInterval in seconds, used internally by sqs-consumer. - * @param {number} handleMessageTimeout - The max limit for the car download in milliseconds, used internally by sqs-consumer. - * @param {number} testMaxRetry - The max retry to check if the IPFS server is available. - * @param {number} testTimeoutMs - The timeout in millisecond for each IPFS availability try. - * @param {number} timeoutFetchMs - The timeout for each fetch in milliseconds. The Download is set to `failed` if the IPFS server - * fetch action do not respond while is downloading the blocks. - * @param {string} dynamoTable - The dynamo DB table - * @param {string} dynamoEndpoint - The dynamo DB endpoint - * @param {string} validationBucket - The s3 bucket for the validation, if exists replace the save bucket - * @param {DownloadStatusManager} downloadStatusManager - * @param {Number} downloadStatusLoggerSeconds - The interval in seconds for the download state - * @returns {Promise} - */ -export async function createConsumer ({ - ipfsApiUrl, - queueUrl, - s3, - batchSize = 10, - maxRetry = 5, - visibilityTimeout = 20, - heartbeatInterval = 10, - handleMessageTimeout = 4 * 60 * 60 * 1000, - testMaxRetry = 5, - testTimeoutMs = 10000, - timeoutFetchMs = 30000, - dynamoTable, - dynamoEndpoint, - validationBucket, - downloadStatusManager, - downloadStatusLoggerSeconds = 300 // logs every 5 minutes -}) { - // throws if can't connect - await retry(() => { - return testIpfsApi(ipfsApiUrl, testTimeoutMs) - }, { retries: testMaxRetry }) - - const dynamo = new DynamoDBClient({ endpoint: dynamoEndpoint }) - - logger.info({ - batchSize, - visibilityTimeout, - heartbeatInterval, - queueUrl, - handleMessageTimeout, - maxRetry, - timeoutFetchMs, - validationBucket - }, 'Create sqs consumer') - - const app = Consumer.create({ - queueUrl, - // The message deletion is managed manually - shouldDeleteMessages: false, - // needs partial acks before we can increase batch size - // see: https://github.com/bbc/sqs-consumer/pull/255 - batchSize, // 1 to 10 - visibilityTimeout, // seconds, how long to hide message from queue after reading. 
- heartbeatInterval, // seconds, must be lower than `visibilityTimeout`. how long before increasing the `visibilityTimeout` - attributeNames: ['ApproximateReceiveCount'], // log retries - // allow 4hrs before timeout. 2/3rs of the world can upload faster than - // 20Mbit/s (fixed broadband), at which 32GiB would transfer in 3.5hrs. - // we can make this more or less generous, but note it ties up a worker. - // see: https://www.speedtest.net/global-index - // see: https://www.omnicalculator.com/other/download-time?c=GBP&v=fileSize:32!gigabyte,downloadSpeed:5!megabit - // TODO: enforce 32GiB limit - handleMessageTimeout, // ms, error if processing takes longer than this. - handleMessageBatch: async (messages) => { - return pickupBatch(messages, { - ipfsApiUrl, - createS3Uploader, - s3, - queueManager: app, - dynamo, - dynamoTable, - validationBucket, - timeoutFetchMs, - maxRetry, - downloadStatusManager - }) - } - }) - - const downloadStatusLoggerInterval = setInterval(() => { - const status = downloadStatusManager.getStatus() - if (Object.keys(status).length) { - logger.info(status, 'DownloadStatus') - } - }, downloadStatusLoggerSeconds * 1000) - - downloadStatusLoggerInterval.unref() - - app.on('stopped', () => { - clearInterval(downloadStatusLoggerInterval) - }) - - app.on('error', (err) => { - if ( - (err.code === 'MissingParameter' || err.constructor.name === 'MissingParameter') && - err.message.includes('Error changing visibility timeout: The request must contain the parameter ChangeMessageVisibilityBatchRequestEntry')) { - logger.trace({ err }, 'The sqs-library is trying to change the visibility of the timeout of an empty message list') - return - } - - logger.error({ err }, 'App error') - }) - - app.on('processing_error', (err) => { - logger.error({ err }, 'App processing error') - }) - - return app -} diff --git a/pickup/lib/downloadStatusManager.js b/pickup/lib/downloadStatusManager.js deleted file mode 100644 index 44f64c4..0000000 --- a/pickup/lib/downloadStatusManager.js +++ /dev/null @@ -1,33 +0,0 @@ -export const STATE_QUEUED = 'queued' -export const STATE_DOWNLOADING = 'downloading' -export const STATE_FAILED = 'failed' -export const STATE_TIMEOUT = 'timeout' -export const STATE_DONE = 'done' - -export class DownloadStatusManager { - constructor () { - this.status = {} - } - - reset () { - this.status = {} - } - - setStatus (cid, state, size) { - this.status[cid] = { ...this.status[cid], state } - - if (size !== undefined) { - this.status[cid] = { ...this.status[cid], size } - } - } - - getStatus () { - return this.status - } - - isRunning () { - return Object.keys(this.status).length > 0 && !!Object.values(this.status).find(s => { - return !['done', 'failed', 'timeout'].includes(s.state) - }) - } -} diff --git a/pickup/lib/dynamo.js b/pickup/lib/dynamo.js deleted file mode 100644 index 483efac..0000000 --- a/pickup/lib/dynamo.js +++ /dev/null @@ -1,43 +0,0 @@ -import { UpdateCommand } from '@aws-sdk/lib-dynamodb' -import { logger } from './logger.js' - -/** - * Update the pin status for a given CID - * - * @param {import('@aws-sdk/lib-dynamodb'.DynamoDBClient)} dynamo - * @param {string} table - * @param {cid} string - * @param {string} status - * @param {string} error - */ -export async function updatePinStatus ({ dynamo, table, cid, status, error }) { - try { - logger.trace({ cid, status }, 'Dynamo try to update pin status') - - const command = { - TableName: table, - Key: { cid }, - ExpressionAttributeNames: { - '#status': 'status', - '#error': 'error', - 
'#downloadFailedAt': 'downloadFailedAt' - }, - ExpressionAttributeValues: { - ':s': status, - ':e': error || '', - ':df': new Date().toISOString() - }, - UpdateExpression: 'set #status = :s, #error = :e, #downloadFailedAt = :df', - ReturnValues: 'ALL_NEW' - } - - logger.trace({ cid, command }, 'Dynamo command') - const res = await dynamo.send(new UpdateCommand(command)) - - logger.trace({ res, cid, status }, 'Dynamo pin status updated') - return res.Attributes - } catch (err) { - logger.error({ cid, status }, 'Dynamo pin status error') - throw err - } -} diff --git a/pickup/lib/ipfs.js b/pickup/lib/ipfs.js index 298c093..fe55049 100644 --- a/pickup/lib/ipfs.js +++ b/pickup/lib/ipfs.js @@ -3,55 +3,30 @@ import { CID } from 'multiformats/cid' import { Multiaddr } from 'multiaddr' import debounce from 'debounce' import fetch from 'node-fetch' - +import retry from 'p-retry' import { logger } from './logger.js' -import { STATE_DOWNLOADING } from './downloadStatusManager.js' -export const ERROR_TIMEOUT = 'TIMEOUT' +/** @typedef {import('node:stream').Readable} Readable */ /** - * Start the fetch of a car + * Fetch a CAR from kubo * - * @param string cid - The CID requested - * @param string ipfsApiUrl - The IPFS server url - * @param object downloadError - The error object, is filled in if an error occurs - * @param int timeoutMs - The timeout for each block fetch in milliseconds. - * The Download is set to `failed` if the IPFS server - * fetch action do not respond while is downloading the blocks. - * @param {DownloadStatusManager} downloadStatusManager - * @returns {Promise<*>} + * @param {string} cid - The CID requested + * @param {string} ipfsApiUrl - The IPFS server url + * @param {AbortSignal} signal - Cancel the fetch + * @returns {Promise} */ -export async function fetchCar (cid, ipfsApiUrl, downloadError, timeoutMs = 30000, downloadStatusManager) { +export async function fetchCar ({ cid, ipfsApiUrl, signal }) { if (!isCID(cid)) { throw new Error({ message: `Invalid CID: ${cid}` }) } const url = new URL(`/api/v0/dag/export?arg=${cid}`, ipfsApiUrl) - const ctl = new AbortController() - - const startCountdown = debounce(() => { - downloadError.code = ERROR_TIMEOUT - ctl.abort() - }, timeoutMs) - startCountdown() - const res = await fetch(url, { method: 'POST', signal: ctl.signal }) + + const res = await fetch(url, { method: 'POST', signal }) if (!res.ok) { throw new Error(`${res.status} ${res.statusText} ${url}`) } - - let downloadSize = 0 - async function * restartCountdown (source) { - // startCountdown.clear() - // throw new Error('There was an error!!') - for await (const chunk of source) { - downloadSize += chunk.length - downloadStatusManager.setStatus(cid, STATE_DOWNLOADING, downloadSize) - startCountdown() - yield chunk - } - startCountdown.clear() - } - - return compose(res.body, restartCountdown) + return res.body } /** @@ -136,7 +111,7 @@ export function isCID (cid) { * Test the connection with IPFS server * @param {string} ipfsApiUrl * @param {number} timeoutMs - * @returns {Promise} + * @returns {Promise>} */ export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) { const url = new URL('/api/v0/id', ipfsApiUrl) @@ -149,10 +124,98 @@ export async function testIpfsApi (ipfsApiUrl, timeoutMs = 10000) { } throw new Error(`IPFS API test failed. 
POST ${url} returned ${res.status} ${res.statusText}`) } - const { AgentVersion, ID } = await res.json() - logger.info({ agentVersion: AgentVersion, peerId: ID }, 'Connected') + return await res.json() } catch (err) { - logger.error({ err }, 'Test ipfs fail') throw new Error('IPFS API test failed.', { cause: err }) } } + +export const TOO_BIG = 'TOO_BIG' +export const FETCH_TOO_SLOW = 'FETCH_TOO_SLOW' +export const CHUNK_TOO_SLOW = 'CHUNK_TOO_SLOW' + +export class CarFetcher { + /** + * @param {object} config + * @param {string} config.ipfsApiUrl + * @param {number} config.maxCarBytes + * @param {number} config.fetchTimeoutMs + * @param {number} config.fetchChunkTimeoutMs + */ + constructor ({ + ipfsApiUrl = 'http://127.0.0.1:5001', + maxCarBytes = 31 * (1024 ** 3), /* 31 GiB */ + fetchTimeoutMs = 4 * 60 * 60 * 1000, /* 4 hrs */ + fetchChunkTimeoutMs = 5 * 60 * 1000 /* 5 mins */ + }) { + this.ipfsApiUrl = ipfsApiUrl + this.maxCarBytes = maxCarBytes + this.fetchTimeoutMs = fetchTimeoutMs + this.fetchChunkTimeoutMs = fetchChunkTimeoutMs + } + + /** + * @param {object} config + * @param {string} config.cid + * @param {AbortController} config.abortCtl + */ + async fetch ({ cid, abortCtl }) { + const { ipfsApiUrl, maxCarBytes, fetchTimeoutMs, fetchChunkTimeoutMs } = this + /** + * @param {AsyncIterable} source + */ + async function * streamWatcher (source) { + const fetchTimer = debounce(() => abort(FETCH_TOO_SLOW), fetchTimeoutMs) + const chunkTimer = debounce(() => abort(CHUNK_TOO_SLOW), fetchChunkTimeoutMs) + const clearTimers = () => { + fetchTimer.clear() + chunkTimer.clear() + } + function abort (reason) { + clearTimers() + if (!abortCtl.signal.aborted) { + abortCtl.abort(reason) + } + } + fetchTimer() + chunkTimer() + let size = 0 + for await (const chonk of source) { + chunkTimer() + size += chonk.byteLength + if (size > maxCarBytes) { + abort(TOO_BIG) + throw new Error(TOO_BIG) // kill the stream now so we dont send more bytes + } else { + yield chonk + } + } + clearTimers() + } + + const body = await fetchCar({ cid, ipfsApiUrl, signal: abortCtl.signal }) + return compose(body, streamWatcher) + } + + async testIpfsApi () { + return retry(() => testIpfsApi(this.ipfsApiUrl), { retries: 4 }) + } + + /** + * @param {string[]} origins + */ + async connectTo (origins) { + return connectTo(origins, this.ipfsApiUrl) + } + + /** + * @param {string[]} origins + */ + async disconnect (origins) { + return disconnect(origins, this.ipfsApiUrl) + } + + async waitForGc () { + return waitForGC(this.ipfsApiUrl) + } +} diff --git a/pickup/lib/pickup.js b/pickup/lib/pickup.js new file mode 100644 index 0000000..345106c --- /dev/null +++ b/pickup/lib/pickup.js @@ -0,0 +1,111 @@ +import { Squiss } from 'squiss-ts' +import { CarFetcher, TOO_BIG, CHUNK_TOO_SLOW, FETCH_TOO_SLOW } from './ipfs.js' +import { S3Uploader } from './s3.js' +import { logger } from './logger.js' + +/** + * Use me in prod to set all the things. 
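An aside on the watchdog pattern in `CarFetcher.fetch` above: each timer is a debounced abort, so invoking it again pushes the deadline back, and a stream that goes quiet lets the deadline fire. A minimal sketch of the chunk-stall half of the idea, assuming only the `debounce` package; names here are illustrative, not part of the diff:

```js
import debounce from 'debounce'

// Abort `ctl` with `reason` if `source` goes quiet for longer than `stallMs`.
async function * watchForStall (source, ctl, stallMs, reason = 'CHUNK_TOO_SLOW') {
  const watchdog = debounce(() => ctl.abort(reason), stallMs)
  watchdog() // arm before the first chunk arrives
  try {
    for await (const chunk of source) {
      watchdog() // every chunk pushes the deadline back
      yield chunk
    }
  } finally {
    watchdog.clear() // done or errored: disarm the timer
  }
}
```

In `CarFetcher.fetch` the same debounced-abort idea is used twice: `chunkTimer` is re-armed on every chunk, while `fetchTimer` is armed once and never reset, so it acts as the overall deadline; the byte counter for `maxCarBytes` rides along in the same loop.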
+ *
+ * @param {Record<string, string|undefined>} env
+ */
+export function createPickupFromEnv (env = process.env) {
+  const {
+    IPFS_API_URL,
+    SQS_QUEUE_URL,
+    BATCH_SIZE,
+    MAX_CAR_BYTES,
+    FETCH_TIMEOUT_MS,
+    FETCH_CHUNK_TIMEOUT_MS,
+    VALIDATION_BUCKET
+  } = env
+
+  if (!SQS_QUEUE_URL) throw new Error('SQS_QUEUE_URL not found in ENV')
+  if (!VALIDATION_BUCKET) throw new Error('VALIDATION_BUCKET not found in ENV')
+
+  const pickup = createPickup({
+    sqsPoller: createSqsPoller({
+      queueUrl: SQS_QUEUE_URL,
+      // env values are strings; squiss-ts wants a number here
+      maxInFlight: BATCH_SIZE ? Number(BATCH_SIZE) : undefined,
+      // noExtensionsAfterSecs is in seconds; FETCH_TIMEOUT_MS is milliseconds
+      noExtensionsAfterSecs: FETCH_TIMEOUT_MS ? Number(FETCH_TIMEOUT_MS) / 1000 : undefined
+    }),
+    carFetcher: new CarFetcher({
+      ipfsApiUrl: IPFS_API_URL,
+      // pass numbers, or undefined to fall back to the CarFetcher defaults
+      maxCarBytes: MAX_CAR_BYTES ? Number(MAX_CAR_BYTES) : undefined,
+      fetchTimeoutMs: FETCH_TIMEOUT_MS ? Number(FETCH_TIMEOUT_MS) : undefined,
+      fetchChunkTimeoutMs: FETCH_CHUNK_TIMEOUT_MS ? Number(FETCH_CHUNK_TIMEOUT_MS) : undefined
+    }),
+    s3Uploader: new S3Uploader({
+      bucket: VALIDATION_BUCKET
+    })
+  })
+
+  return pickup
+}
+
+/**
+ * @param {object} config
+ * @param {Squiss} config.sqsPoller
+ * @param {CarFetcher} config.carFetcher
+ * @param {S3Uploader} config.s3Uploader
+ */
+export function createPickup ({ sqsPoller, carFetcher, s3Uploader }) {
+  /**
+   * @param {import('squiss-ts').Message} msg
+   */
+  async function messageHandler (msg) {
+    const { cid, origins, key } = msg.body
+    const abortCtl = new AbortController()
+    const upload = s3Uploader.createUploader({ cid, key })
+    try {
+      logger.info({ cid, origins }, 'Fetching CAR')
+      await carFetcher.connectTo(origins)
+      const body = await carFetcher.fetch({ cid, origins, abortCtl })
+      await upload(body)
+      logger.info({ cid, origins }, 'OK. Car in S3')
+      await msg.del() // the message is handled, remove it from queue.
+    } catch (err) {
+      if (abortCtl.signal.reason === TOO_BIG) {
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: too big')
+        await msg.release()
+      } else if (abortCtl.signal.reason === CHUNK_TOO_SLOW) {
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: chunk too slow')
+        await msg.release()
+      } else if (abortCtl.signal.reason === FETCH_TOO_SLOW) {
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR: fetch too slow')
+        await msg.release()
+      } else {
+        logger.error({ cid, origins, err }, 'Failed to fetch CAR')
+        if (!msg.isHandled) {
+          await msg.release() // back to the queue, try again
+        }
+      }
+    } finally {
+      await carFetcher.disconnect(origins)
+      await carFetcher.waitForGc()
+    }
+  }
+
+  sqsPoller.on('message', messageHandler)
+
+  const pollerStart = sqsPoller.start.bind(sqsPoller)
+  sqsPoller.start = async () => {
+    // throw if we can't connect to kubo
+    await carFetcher.testIpfsApi()
+    return pollerStart()
+  }
+
+  return sqsPoller
+}
+
+/**
+ * @param {import('squiss-ts').ISquissOptions} config
+ */
+export function createSqsPoller (config) {
+  return new Squiss({
+    // set our default overrides here, we always want these.
+    autoExtendTimeout: true,
+    receiveSqsAttributes: ['ApproximateReceiveCount'],
+    bodyFormat: 'json',
+    ...config
+  })
+}
diff --git a/pickup/lib/pickupBatch.js b/pickup/lib/pickupBatch.js
deleted file mode 100644
index 506d911..0000000
--- a/pickup/lib/pickupBatch.js
+++ /dev/null
@@ -1,148 +0,0 @@
-import { fetchCar, connectTo, disconnect, waitForGC, ERROR_TIMEOUT } from './ipfs.js'
-import { deleteMessage } from './consumer.js'
-import { updatePinStatus } from './dynamo.js'
-import { logger } from './logger.js'
-import { STATE_DONE, STATE_TIMEOUT, STATE_FAILED, STATE_QUEUED } from './downloadStatusManager.js'
-
-/**
- * Fetch CARs for a batch of SQS messages.
- * @param {import('sqs-consumer').SQSMessage[]} messages - * @param {string} ipfsApiUrl - * @param {Function} createS3Uploader - * @param {import('@aws-sdk/client-s3'.S3Client)} s3 - * @param {Consumer} queueManager - * @param {import('@aws-sdk/lib-dynamodb'.DynamoDBClient)} dynamo - * @param {string} dynamoTable - * @param {string} validationBucket - * @param {number} timeoutFetchMs - * @param {number} maxRetry - * @param {DownloadStatusManager} downloadStatusManager - * @returns {Promise} - */ -export async function pickupBatch (messages, { - ipfsApiUrl, - createS3Uploader, - s3, - queueManager, - dynamo, - dynamoTable, - validationBucket, - timeoutFetchMs, - maxRetry, - downloadStatusManager -}) { - const jobs = [] - const allOrigins = [] - - logger.info({ messages }, 'Pickup batch start') - - const requestIds = [] - - for (const message of messages) { - const { cid, origins, bucket, key, requestid } = JSON.parse(message.Body) - logger.trace({ cid, requestid }, 'Push message in job list') - jobs.push({ message, requestid, cid, upload: createS3Uploader({ bucket: validationBucket ?? bucket, key, client: s3 }) }) - allOrigins.concat(origins) - requestIds.push(requestid) - } - - // Prepare! - logger.trace({ allOrigins, ipfsApiUrl }, 'Wait for GC and connect to origins') - await Promise.all([ - waitForGC(ipfsApiUrl), - connectTo(allOrigins, ipfsApiUrl) - ]) - - logger.info(`Ready to process ${jobs.length} jobs`) - - // Stores the totalMessages because the `messages`array will be modified in the process - const totalMessages = messages.length - // Stores the requestIds to properly remove the items from the `messages` array - - // Collect the results, just for logging purpose - const resultStats = {} - - // Do! - const res = await Promise.allSettled(jobs.map(async job => { - const { message, cid, upload, requestid } = job - logger.info({ cid, requestid, messageId: message.MessageId }, 'Start job') - - downloadStatusManager.setStatus(cid, STATE_QUEUED) - // Inject a downloadError object to the `upload` function, is required to intercept the timeout error - // because the `abort` action do not allow to pass a code to the call - const downloadError = {} - - try { - const body = await fetchCar(cid, ipfsApiUrl, downloadError, timeoutFetchMs, downloadStatusManager) - logger.info({ cid, requestid, messageId: message.MessageId }, 'IPFS node responded, downloading the car') - await upload({ body, cid, downloadError }) - logger.info({ cid, requestid, messageId: message.MessageId }, 'Car downloaded and stored in S3') - - // After the download some low level action are required to override the `sqs-consumer` library limit. - // The library works with the full batch and the is required to remove the messages while they are processed. - // The processed message is removed from the queue list to avoid further changeVisabilityTimeout. - const arrayRemoveIndex = requestIds.indexOf(requestid) - requestIds.splice(Number(arrayRemoveIndex), 1) - messages.splice(Number(arrayRemoveIndex), 1) - logger.trace({ - cid, - requestid, - messageId: message.MessageId, - arrayRemoveIndex - }, 'Removed processed message from the messages') - - await deleteMessage(queueManager, message) - downloadStatusManager.setStatus(cid, STATE_DONE) - resultStats[cid] = 'success' - } catch (err) { - // The processed message is removed from the queue list to avoid further changeVisabilityTimeout. 
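For contrast with the manual list-splicing and visibility bookkeeping above: the replacement handler in pickup/lib/pickup.js settles each message individually. `msg.del()` acks success, `msg.release()` returns the message to the queue for another attempt, and `msg.isHandled` guards against settling twice; redelivery is left to SQS, and after `maxReceiveCount` receives (3, per the BasicApiStack change later in this diff) a message lands on the dead-letter queue. A compressed sketch of that contract, with `doWork` as a hypothetical stand-in for fetch-and-upload:

```js
async function handle (msg) {
  try {
    await doWork(msg.body) // hypothetical: fetch the CAR and upload it
    await msg.del() // success: ack and remove from the queue
  } catch (err) {
    if (!msg.isHandled) {
      await msg.release() // failure: make it visible again for a retry
    }
  }
}
```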
- const arrayRemoveIndex = requestIds.indexOf(requestid) - requestIds.splice(Number(arrayRemoveIndex), 1) - messages.splice(Number(arrayRemoveIndex), 1) - - const currentRetry = Number(message.Attributes.ApproximateReceiveCount) - if (downloadError.code === ERROR_TIMEOUT || - currentRetry >= maxRetry - ) { - const errorMessage = currentRetry >= maxRetry ? 'Max retry' : 'Download timeout' - logger.error({ cid, requestid, currentRetry, messageId: message.MessageI, arrayRemoveIndex }, - errorMessage) - // Update the status on dynamodb to failed - await updatePinStatus({ - dynamo, - table: dynamoTable, - cid, - status: 'failed', - error: errorMessage - }) - - // Delete the message from the queue - await deleteMessage(queueManager, message) - resultStats[cid] = 'timeout' - downloadStatusManager.setStatus(cid, STATE_TIMEOUT) - } else { - // For any other error the message from the queue is not removed, - // then when the visibility timeout is expired the file is resend on the queue - // A deadLetterQueue should be set to avoid infinite retry. - logger.error({ err, cid, requestid, messageId: message.MessageI, arrayRemoveIndex }, 'Download error') - resultStats[cid] = `fail: ${err.message}` - downloadStatusManager.setStatus(cid, STATE_FAILED) - } - throw err - } finally { - // The message_processed event is fired. - queueManager.emit('message_processed', message) - } - return message // hand back msg so we can ack all that succeded - })) - - // Clear the origins! - await disconnect(allOrigins, ipfsApiUrl) - - const ok = res.filter(r => r.status === 'fulfilled').map(r => r.value) - logger.info({ resultStats, success: ok.length, total: totalMessages }, 'Done processing batch.') - - logger.info(downloadStatusManager.getStatus()) - downloadStatusManager.reset() - return ok -} diff --git a/pickup/lib/s3.js b/pickup/lib/s3.js index eac040f..2e4bb16 100644 --- a/pickup/lib/s3.js +++ b/pickup/lib/s3.js @@ -8,52 +8,64 @@ import { logger } from './logger.js' * @param {import('@aws-sdk/client-s3'.S3Client)} client * @param {string} bucket * @param {string} key - * @param {Stream} body + * @param {Readable} body * @param {string} cid - * @param {object} downloadError - * @param {string} downloadError.code * @returns {Promise} */ -export async function sendToS3 ({ client, bucket, key }, { body, cid, downloadError }) { - const params = { - Metadata: { structure: 'complete' }, - Bucket: bucket, - Key: key, - Body: body - } - +export async function sendToS3 ({ client, bucket, key, body, cid }) { // Handles s3 multipart uploading // see: https://github.com/aws/aws-sdk-js-v3/blob/main/lib/lib-storage/README.md - const s3Upload = new Upload({ client, params }) + const s3Upload = new Upload({ + client, + params: { + Metadata: { structure: 'complete' }, + Bucket: bucket, + Key: key, + Body: body + } + }) body.on('error', (err) => { if (err.code === 'AbortError' || err.constructor.name === 'AbortError') { logger.trace({ err, cid }, 'The abort command was thrown by a ipfs timeout') return } - logger.error({ err, code: downloadError.code, cid }, 'S3 upload error') + logger.error({ err, cid }, 'S3 upload error') }) return s3Upload.done() } -/** - * Create the uploader - * @param {import('@aws-sdk/client-s3'.S3Client)} client - * @param {string} bucket - * @param {string} key - * @returns {import('@aws-sdk/client-s3'.S3Client)} - */ -export function createS3Uploader ({ client = createS3Client(), bucket, key }) { - return sendToS3.bind(null, { client, bucket, key }) -} +export class S3Uploader { + /** + * @param {object} 
config + * @param {S3Client} s3 + * @param {string} bucket + */ + constructor ({ s3 = new S3Client(), bucket }) { + this.s3 = s3 + this.bucket = bucket + } -/** - * Create the S3Client - * - * @returns {import('@aws-sdk/client-s3'.S3Client)} - */ -export function createS3Client () { - // Expects AWS_* ENV vars set. - return new S3Client({}) + /** + * @param {object} config + * @param {string} cid + * @param {string} key + */ + createUploader ({ cid, key }) { + const { s3, bucket } = this + /** + * @typedef {import('node:stream').Readable} Readable + * @param {Readable} body + */ + return async function (body) { + return sendToS3({ + client: s3, + bucket, + key, + body, + cid + }) + } + } } diff --git a/pickup/package.json b/pickup/package.json index 51a5b12..e6754c9 100644 --- a/pickup/package.json +++ b/pickup/package.json @@ -18,20 +18,20 @@ "@aws-sdk/lib-dynamodb": "^3.112.0", "@aws-sdk/lib-storage": "^3.97.0", "@multiformats/multiaddr": "^11.4.0", - "async-retry": "1.3.3", "debounce": "^1.2.1", "multiaddr": "^10.0.1", "multiformats": "^9.6.5", "node-fetch": "^3.2.10", + "p-retry": "^5.1.2", "pino": "8.8.0", - "sqs-consumer": "^5.7.0" + "squiss-ts": "^4.4.1" }, "devDependencies": { "@aws-sdk/client-sqs": "^3.118.1", "@web-std/blob": "^3.0.4", "ava": "^4.3.0", "ipfs-car": "^0.7.0", - "standard": "^16.0.4", + "standard": "^17.0.0", "testcontainers": "^8.10.1" }, "engines": { diff --git a/pickup/test/_compose.js b/pickup/test/_compose.js index 083100c..5b1358b 100644 --- a/pickup/test/_compose.js +++ b/pickup/test/_compose.js @@ -71,7 +71,7 @@ export async function createQueue (sqsPort, sqs) { await sqs.send(new CreateQueueCommand({ QueueName, Attributes: { - DelaySeconds: '1', + DelaySeconds: '0', MessageRetentionPeriod: '10' } })) diff --git a/pickup/test/_helpers.js b/pickup/test/_helpers.js index 3e857b4..46bd80b 100644 --- a/pickup/test/_helpers.js +++ b/pickup/test/_helpers.js @@ -1,5 +1,5 @@ import stream from 'node:stream' -import retry from 'async-retry' +import retry from 'p-retry' import { DynamoDBDocumentClient, GetCommand, PutCommand } from '@aws-sdk/lib-dynamodb' import { packToBlob } from 'ipfs-car/pack/blob' import { MemoryBlockStore } from 'ipfs-car/blockstore/memory' @@ -81,21 +81,11 @@ export async function sleep (ms) { return new Promise((resolve) => setTimeout(() => resolve(), ms)) } -export async function verifyMessage ({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3, expectedError }) { +export async function verifyMessage ({ msg, cars, t, bucket, s3 }) { try { - const message = JSON.parse(msg.Body) + const message = msg.body const index = Number(message.requestid) - // If there is a timeout, the dynamo item status should be updated to `failed` - if (cars[index].expectedResult === 'failed') { - const item = await getValueFromDynamo({ dynamoClient, dynamoTable, cid: cars[index].cid }) - t.is(item.cid, cars[index].cid) - t.is(item.status, 'failed') - t.is(item.error, expectedError) - t.truthy(item.downloadFailedAt > item.created) - } else if (cars[index].expectedResult === 'error') { - // after the first error, the expectedResult of the car is set to success to allow the upload in the next step - cars[index].expectedResult = 'success' - } else { + if (cars[index].expectedResult === 'success') { // If succeed, the s3 file should have the same content of the car generated const { cid: msgCid } = message t.is(msgCid, cars[index].cid) @@ -112,7 +102,7 @@ export async function verifyMessage ({ msg, cars, dynamoClient, dynamoTable, t, } } -export async function 
stopConsumer (consumer) { +export async function stopPickup (consumer) { consumer.stop() return await retry(() => !consumer.isRunning, { retries: 5 diff --git a/pickup/test/consumer.test.js b/pickup/test/consumer.test.js deleted file mode 100644 index 961686b..0000000 --- a/pickup/test/consumer.test.js +++ /dev/null @@ -1,85 +0,0 @@ -import { SendMessageCommand } from '@aws-sdk/client-sqs' -import { Consumer } from 'sqs-consumer' -import { createConsumer } from '../lib/consumer.js' -import { compose } from './_compose.js' -import test from 'ava' -import { DownloadStatusManager } from '../lib/downloadStatusManager.js' - -test.before(async t => { - t.timeout(1000 * 60) - t.context = await compose() -}) - -test.after(async t => { - await t.context.shutDownDockers() -}) - -// verify the lib behaves as expected -test('sqs-consumer', async t => { - const testCid = 'hello!' - const { sqs, createQueue } = t.context - - const QueueUrl = await createQueue() - await sqs.send(new SendMessageCommand({ - DelaySeconds: 1, - MessageBody: JSON.stringify({ cid: testCid }), - QueueUrl - })) - - await new Promise((resolve, reject) => { - const app = Consumer.create({ - queueUrl: QueueUrl, - handleMessage: async (message) => { - const res = JSON.parse(message.Body) - t.is(res.cid, testCid) - app.stop() - resolve(true) - } - }) - app.on('error', (err) => { - reject(err) - }) - app.on('processing_error', (err) => { - reject(err) - }) - app.on('timeout_error', (err) => { - reject(err) - }) - app.start() - }) -}) - -test('createConsumer', async t => { - t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3 } = t.context - - const cid = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' - const key = `psa/${cid}.car` - const queueUrl = await createQueue() - const bucket = await createBucket() - const consumer = await createConsumer({ ipfsApiUrl, queueUrl, s3, downloadStatusManager: new DownloadStatusManager() }) - const done = new Promise((resolve, reject) => { - consumer.on('message_processed', msg => { - const { cid: msgCid } = JSON.parse(msg.Body) - t.is(msgCid, cid) - resolve() - }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) - }) - consumer.start() - - await sqs.send(new SendMessageCommand({ - DelaySeconds: 1, - MessageBody: JSON.stringify({ cid, bucket, key, origins: [], requestid: 'test1' }), - QueueUrl: queueUrl - })) - - return done -}) - -test('createConsumer errors if can\'t connect to IPFS', async t => { - const { createQueue } = t.context - const queueUrl = await createQueue() - await t.throwsAsync(createConsumer({ ipfsApiUrl: 'http://127.0.0.1', queueUrl, downloadStatusManager: new DownloadStatusManager() })) -}) diff --git a/pickup/test/downloadStatusManager.test.js b/pickup/test/downloadStatusManager.test.js deleted file mode 100644 index a7eb83c..0000000 --- a/pickup/test/downloadStatusManager.test.js +++ /dev/null @@ -1,101 +0,0 @@ -import { - DownloadStatusManager, - STATE_DONE, - STATE_DOWNLOADING, - STATE_FAILED, - STATE_QUEUED, - STATE_TIMEOUT -} from '../lib/downloadStatusManager.js' -import test from 'ava' - -// verify the lib behaves as expected -test('with no data should not be running', async t => { - const statusManager = new DownloadStatusManager() - t.falsy(statusManager.isRunning()) -}) - -test('with a queued value should be running', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('123', STATE_QUEUED) - t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_QUEUED } }) - 
t.truthy(statusManager.isRunning()) -}) - -test('set the downloading state with a size', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('123', STATE_QUEUED) - statusManager.setStatus('123', STATE_DOWNLOADING, 100) - t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_DOWNLOADING, size: 100 } }) - t.truthy(statusManager.isRunning()) -}) - -test('set the done state with a size', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('123', STATE_QUEUED) - statusManager.setStatus('123', STATE_DOWNLOADING, 100) - statusManager.setStatus('123', STATE_DONE, 200) - t.deepEqual(statusManager.getStatus(), { 123: { state: STATE_DONE, size: 200 } }) - t.falsy(statusManager.isRunning()) -}) - -test('set multiple state and should be running', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('123', STATE_QUEUED) - statusManager.setStatus('456', STATE_DOWNLOADING, 100) - statusManager.setStatus('780', STATE_DONE, 200) - statusManager.setStatus('abc', STATE_FAILED) - statusManager.setStatus('efg', STATE_TIMEOUT) - t.deepEqual(statusManager.getStatus(), - { - 123: { - state: 'queued' - }, - 456: { - size: 100, - state: 'downloading' - }, - 780: { - size: 200, - state: 'done' - }, - abc: { - state: 'failed' - }, - efg: { - state: 'timeout' - } - }) - t.truthy(statusManager.isRunning()) -}) - -test('set multiple state and should be not running', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('780', STATE_DONE, 200) - statusManager.setStatus('abc', STATE_FAILED) - statusManager.setStatus('efg', STATE_TIMEOUT) - t.deepEqual(statusManager.getStatus(), - { - 780: { - size: 200, - state: 'done' - }, - abc: { - state: 'failed' - }, - efg: { - state: 'timeout' - } - }) - t.falsy(statusManager.isRunning()) -}) - -test('should reset the state', async t => { - const statusManager = new DownloadStatusManager() - statusManager.setStatus('780', STATE_DONE, 200) - statusManager.setStatus('abc', STATE_FAILED) - statusManager.setStatus('efg', STATE_TIMEOUT) - statusManager.reset() - t.deepEqual(statusManager.getStatus(), - {}) - t.falsy(statusManager.isRunning()) -}) diff --git a/pickup/test/pickup.test.js b/pickup/test/pickup.test.js index 2d6c594..18ab638 100644 --- a/pickup/test/pickup.test.js +++ b/pickup/test/pickup.test.js @@ -1,11 +1,12 @@ import nock from 'nock' import test from 'ava' +import { nanoid } from 'nanoid' import { SendMessageCommand } from '@aws-sdk/client-sqs' - -import { createConsumer } from '../lib/consumer.js' +import { createPickup, createSqsPoller } from '../lib/pickup.js' import { compose } from './_compose.js' -import { prepareCid, verifyMessage, sleep, getMessagesFromSQS, stopConsumer, getValueFromDynamo } from './_helpers.js' -import { DownloadStatusManager, STATE_DONE, STATE_QUEUED, STATE_DOWNLOADING } from '../lib/downloadStatusManager.js' +import { prepareCid, verifyMessage, sleep, getMessagesFromSQS, stopPickup } from './_helpers.js' +import { CarFetcher } from '../lib/ipfs.js' +import { S3Uploader } from '../lib/s3.js' test.before(async t => { t.timeout(1000 * 60) @@ -18,30 +19,37 @@ test.after(async t => { }) test('throw an error if can\'t connect to IPFS', async t => { - const { createQueue } = t.context + const { createQueue, createBucket, s3 } = t.context const queueUrl = await createQueue() - await t.throwsAsync(createConsumer({ - ipfsApiUrl: 'http://127.0.0.1', - queueUrl, - testMaxRetry: 1, - testTimeoutMs: 
50, - downloadStatusManager: new DownloadStatusManager() - })) + const bucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), + carFetcher: new CarFetcher({ ipfsApiUrl }), + s3Uploader: new S3Uploader({ s3, bucket }) + }) + await t.throwsAsync(() => pickup.start()) }) -test('Process 3 messages concurrently and the last has a timeout', async t => { +test('Process 1 message successfully', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() const validationBucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 1000, fetchChunkTimeoutMs: 1000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 3000, expectedResult: 'failed' }) + await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'success' }) ] // Configure nock to mock the response @@ -50,12 +58,12 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') + .optionally().reply(200, 'GC Success').persist() cars.forEach((car, index) => { nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin .reply(200, () => { - return cars[index].carReadableStream + return cars[index].car // send the whole car body at once }) }) @@ -67,75 +75,57 @@ test('Process 3 messages concurrently and the last has a timeout', async t => { })) } - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager: new DownloadStatusManager(), - validationBucket - } - ) - - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 + let msgCount = 0 + let msgDel = 0 const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const message = JSON.parse(msg.Body) - const index = Number(message.requestid) - if (cars[index].expectedResult !== 'error') { - const myBuffer = await cars[index].car.arrayBuffer() - - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) - await sleep(cars[index].timeBetweenChunks) - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) - cars[index].carReadableStream.push(null) - } - }) - - consumer.on('message_processed', async msg => { + const nope = (reason) => { + stopPickup(pickup) + t.fail(reason) + reject(new Error(reason)) + } + pickup.on('message', () => msgCount++) + pickup.on('released', () 
=> nope('unexpected released event')) + pickup.on('error', () => nope('unexpected error event')) + pickup.on('timeoutReached', () => nope('unexpected timeoutReached event')) + pickup.on('deleted', () => { + msgDel++ + t.is(msgCount, 1) + t.is(msgDel, 1) try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket: validationBucket, s3, expectedError: 'Download timeout' }) - resolved++ - - if (resolved === cars.length) { - await sleep(5) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) - nockPickup.done() - await stopConsumer(consumer) - resolve() - } - } catch (e) { - reject(e) + stopPickup(pickup) + nockPickup.done() + resolve() + } catch (err) { + console.log('error', err) + reject(err) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) }) - consumer.start() + await pickup.start() return done }) -test('Process 1 message that fails and returns in the list', async t => { +test('Fail 1 message that sends data but exceeds fetchTimeoutMs', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() + const validationBucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 500, fetchChunkTimeoutMs: 2000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) + await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' }) ] // Configure nock to mock the response @@ -144,15 +134,13 @@ test('Process 1 message that fails and returns in the list', async t => { .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') + .optionally().reply(200, 'GC Success').persist() cars.forEach((car, index) => { nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin - .reply((uri, requestBody) => [400, 'KO']) - nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin - .reply((uri, requestBody) => [200, cars[index].carReadableStream]) + .reply(200, () => { + return cars[index].carReadableStream // we would write to this stream, but we want the test to fail + }) }) // Send the SQS messages in queue @@ -163,78 +151,127 @@ test('Process 1 message that fails and returns in the list', async t => { })) } - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager: new DownloadStatusManager() - } - ) - - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 + let msgReleased = 0 const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const 
message = JSON.parse(msg.Body) - const index = Number(message.requestid) - if (cars[index].expectedResult !== 'error') { - const myBuffer = await cars[index].car.arrayBuffer() - - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) - await sleep(cars[index].timeBetweenChunks) - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) - cars[index].carReadableStream.push(null) + const nope = (reason) => { + stopPickup(pickup) + t.fail(reason) + reject(new Error(reason)) + } + pickup.on('released', () => { + msgReleased++ + t.is(msgReleased, 1) + try { + stopPickup(pickup) + nockPickup.done() + resolve() + } catch (err) { + console.log('error', err) + reject(err) } }) + pickup.on('error', () => nope('unexpected error event')) + pickup.on('timeoutReached', () => nope('unexpected timeoutReached event')) + pickup.on('deleted', () => nope('unexpected deleted event')) + }) - consumer.on('message_processed', async msg => { - try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3 }) - resolved++ + await pickup.start() - // The +1 is add to manage the second try - if (resolved === cars.length + 1) { - await sleep(5) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) + return done +}) - nockPickup.done() - await stopConsumer(consumer) - resolve() - } - } catch (e) { - reject(e) +test('Fail 1 message that sends data but exceeds fetchChunkTimeoutMs', async t => { + t.timeout(1000 * 60) + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context + + const queueUrl = await createQueue() + const bucket = await createBucket() + const validationBucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchTimeoutMs: 2000, fetchChunkTimeoutMs: 1000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) + + // Preapre the data for the test + const cars = [ + await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' }) + ] + + // Configure nock to mock the response + const nockPickup = nock(ipfsApiUrl) + nockPickup + .post('/api/v0/id')// Alive + .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) + .post('/api/v0/repo/gc?silent=true')// Garbage collector + .optionally().reply(200, 'GC Success').persist() + + cars.forEach((car, index) => { + nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin + .reply(200, () => { + return cars[index].carReadableStream // we would write to this stream, but we want the test to fail + }) + }) + + // Send the SQS messages in queue + for (let i = 0; i < cars.length; i++) { + await sqs.send(new SendMessageCommand({ + MessageBody: JSON.stringify({ cid: cars[i].cid, bucket, key: cars[i].key, origins: [], requestid: i }), + QueueUrl: queueUrl + })) + } + + let msgReleased = 0 + + const done = new Promise((resolve, reject) => { + const nope = (reason) => { + stopPickup(pickup) + t.fail(reason) + reject(new Error(reason)) + } + pickup.on('released', () => { + msgReleased++ + t.is(msgReleased, 1) + try { + stopPickup(pickup) + nockPickup.done() + resolve() + } catch (err) { + console.log('error', err) + reject(err) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) + pickup.on('error', () => 
nope('unexpected error event')) + pickup.on('timeoutReached', () => nope('unexpected timeoutReached event')) + pickup.on('deleted', () => nope('unexpected deleted event')) }) - consumer.start() + await pickup.start() return done }) -test('Process 3 messages concurrently and the last has an error', async t => { +test('Fail 1 message that sends data but exceeds maxCarBytes', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() + const validationBucket = await createBucket() + const ipfsApiUrl = `https://${nanoid()}:6000` + + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' }, maxInFlight: 1, activePollIntervalMs: 10000, idlePollIntervalMs: 10000 }), + carFetcher: new CarFetcher({ ipfsApiUrl, maxCarBytes: 1 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) + await prepareCid({ dynamoClient, dynamoTable, expectedResult: 'failed' }) ] // Configure nock to mock the response @@ -243,20 +280,12 @@ test('Process 3 messages concurrently and the last has an error', async t => { .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - .post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') + .optionally().reply(200, 'GC Success').persist() cars.forEach((car, index) => { - if (car.expectedResult === 'error') { - nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin - .reply(400, () => { - return 'OK' - }) - } nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin .reply(200, () => { - return cars[index].carReadableStream + return cars[index].car }) }) @@ -268,103 +297,66 @@ test('Process 3 messages concurrently and the last has an error', async t => { })) } - const downloadStatusManager = new DownloadStatusManager() - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, - visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - downloadStatusManager, - downloadStatusLoggerSeconds: 1 - } - ) - - // The number of the messages resolved, when is max close the test and finalize - let resolved = 0 + let msgReleased = 0 const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - const message = JSON.parse(msg.Body) - const index = Number(message.requestid) - if (cars[index].expectedResult !== 'error') { - const myBuffer = await cars[index].car.arrayBuffer() - - await sleep(cars[index].timeBetweenChunks) - t.deepEqual(downloadStatusManager.getStatus()[cars[index].cid], { - state: STATE_QUEUED - }) - cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) - await sleep(cars[index].timeBetweenChunks) - t.deepEqual(downloadStatusManager.getStatus()[cars[index].cid], { - size: 10, - state: STATE_DOWNLOADING - }) - 
cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) - await sleep(cars[index].timeBetweenChunks) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].size > 100) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].state, STATE_DOWNLOADING) - cars[index].carReadableStream.push(null) - t.truthy(downloadStatusManager.getStatus()[cars[index].cid].state, STATE_DONE) - } - }) - - consumer.on('message_processed', async msg => { + const nope = (reason) => { + stopPickup(pickup) + t.fail(reason) + reject(new Error(reason)) + } + pickup.on('released', () => { + msgReleased++ + t.is(msgReleased, 1) try { - await verifyMessage({ msg, cars, dynamoClient, dynamoTable, t, bucket, s3 }) - resolved++ - - if (resolved === cars.length + 1) { - await sleep(2000) - const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) - t.is(resultMessages, undefined) - await nockPickup.done() - t.falsy(Object.keys(downloadStatusManager.getStatus()).length) - await stopConsumer(consumer) - resolve() - } - } catch (e) { - reject(e) + stopPickup(pickup) + nockPickup.done() + resolve() + } catch (err) { + console.log('error', err) + reject(err) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) + pickup.on('error', () => nope('unexpected error event')) + pickup.on('timeoutReached', () => nope('unexpected timeoutReached event')) + pickup.on('deleted', () => nope('unexpected deleted event')) }) - consumer.start() + await pickup.start() return done }) -test('Process 1 message that fails with a max retry', async t => { +test('Process 3 messages concurrently and the last has a timeout', async t => { t.timeout(1000 * 60) - const { createQueue, createBucket, ipfsApiUrl, sqs, s3, dynamoClient, dynamoEndpoint, dynamoTable } = t.context + const { createQueue, createBucket, sqs, s3, dynamoClient, dynamoTable } = t.context const queueUrl = await createQueue() const bucket = await createBucket() + const validationBucket = await createBucket() // Preapre the data for the test const cars = [ - await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'error' }) + await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), + await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 500, expectedResult: 'success' }), + await prepareCid({ dynamoClient, dynamoTable, timeBetweenChunks: 3000, expectedResult: 'failed' }) ] - const retryNum = 3 + const ipfsApiUrl = `https://${nanoid()}:6000` // Configure nock to mock the response const nockPickup = nock(ipfsApiUrl) nockPickup .post('/api/v0/id')// Alive .reply(200, JSON.stringify({ AgentVersion: 'Agent 1', ID: '12345465' })) - for (let i = 0; i < retryNum; i++) { - nockPickup.post('/api/v0/repo/gc?silent=true')// Garbage collector - .reply(200, 'GC Success') - nockPickup.post(`/api/v0/dag/export?arg=${cars[0].cid}`) // Get pin - .reply((uri, requestBody) => [400, 'KO']) - } + .post('/api/v0/repo/gc?silent=true')// Garbage collector + .reply(200, 'GC Success').persist() + + cars.forEach((car, index) => { + nockPickup.post(`/api/v0/dag/export?arg=${car.cid}`) // Get pin + .reply(200, () => { + return cars[index].carReadableStream + }) + }) // Send the SQS messages in queue for (let i = 0; i < cars.length; i++) { @@ -374,67 +366,51 @@ test('Process 1 message that fails with a max retry', async t => { })) } - // Create the consumer - const consumer = await createConsumer( - { - ipfsApiUrl, - queueUrl, - s3, - heartbeatInterval: 2, 
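The stream-driven tests in this file share one fixture trick: nock answers `/api/v0/dag/export` with a `Readable` that the test itself feeds, so bytes can be dripped (or withheld) to trip exactly one watchdog per test. A sketch of the pattern, assuming the stream is wired up the way `prepareCid` exposes it here:

```js
import { Readable } from 'node:stream'

// A push-mode stream; nock returns it as the response body for the CAR fetch.
const carReadableStream = new Readable({ read () {} })

// Drip the CAR: a 10 byte head, a pause, then the rest, then end-of-stream.
async function dripCar (car, timeBetweenChunks) {
  const buf = Buffer.from(await car.arrayBuffer())
  carReadableStream.push(buf.subarray(0, 10))
  await new Promise(resolve => setTimeout(resolve, timeBetweenChunks))
  carReadableStream.push(buf.subarray(10))
  carReadableStream.push(null) // EOF
}
```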
- visibilityTimeout: 3, - dynamoEndpoint, - dynamoTable, - timeoutFetchMs: 2000, - maxRetry: 3, - downloadStatusManager: new DownloadStatusManager() - } - ) + const pickup = createPickup({ + sqsPoller: createSqsPoller({ queueUrl, awsConfig: { region: 'us-east-1' } }), + carFetcher: new CarFetcher({ ipfsApiUrl, fetchChunkTimeoutMs: 2000 }), + s3Uploader: new S3Uploader({ s3, bucket: validationBucket }) + }) // The number of the messages resolved, when is max close the test and finalize let resolved = 0 - let shouldFail = false const done = new Promise((resolve, reject) => { - consumer.on('message_received', async msg => { - if (Number(msg.Attributes.ApproximateReceiveCount) > retryNum) { - shouldFail = true + pickup.on('message', async msg => { + const message = msg.body + const index = Number(message.requestid) + if (cars[index].expectedResult !== 'error') { + const myBuffer = await cars[index].car.arrayBuffer() + + cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(0, 10))) + await sleep(cars[index].timeBetweenChunks) + cars[index].carReadableStream.push(Buffer.from(myBuffer.slice(10))) + cars[index].carReadableStream.push(null) } }) - consumer.on('message_processed', async msg => { + pickup.on('handled', async msg => { try { + await verifyMessage({ msg, cars, t, bucket: validationBucket, s3 }) resolved++ - // The +1 is add to manage the second try - if (resolved === retryNum) { - await sleep(50) + if (resolved === cars.length) { + await sleep(5) const resultMessages = await getMessagesFromSQS({ queueUrl, length: cars.length, sqs }) t.is(resultMessages, undefined) - - const item = await getValueFromDynamo({ dynamoClient, dynamoTable, cid: cars[0].cid }) - t.is(item.cid, cars[0].cid) - t.is(item.status, 'failed') - t.is(item.error, 'Max retry') - t.truthy(item.downloadFailedAt > item.created) - - await sleep(4000) nockPickup.done() - await stopConsumer(consumer) - if (shouldFail) { - reject(new Error('Message not set to failed after max retry')) - } else { - resolve() - } + await stopPickup(pickup) + resolve() } } catch (e) { reject(e) } }) - consumer.on('processing_error', reject) - consumer.on('timeout_error', reject) + pickup.on('error', reject) + pickup.on('timeoutReached', reject) }) - consumer.start() + await pickup.start() return done }) diff --git a/stacks/BasicApiStack.ts b/stacks/BasicApiStack.ts index 10d5311..e6ab11e 100644 --- a/stacks/BasicApiStack.ts +++ b/stacks/BasicApiStack.ts @@ -14,7 +14,7 @@ export function BasicApiStack ({ queue: { deadLetterQueue: { queue: dlq.cdk.queue, - maxReceiveCount: 11 + maxReceiveCount: 3 } } } diff --git a/stacks/PickupStack.ts b/stacks/PickupStack.ts index aacb7e3..1e214bd 100644 --- a/stacks/PickupStack.ts +++ b/stacks/PickupStack.ts @@ -70,12 +70,15 @@ export function PickupStack ({ app, stack }: StackContext): void { ephemeralStorageGiB: 64, // max 200 environment: { SQS_QUEUE_URL: basicApi.queue.queueUrl, - IPFS_API_URL: 'http://127.0.0.1:5001', DYNAMO_TABLE_NAME: basicApi.dynamoDbTable.tableName, - BATCH_SIZE: process.env.BATCH_SIZE ?? '5', - TIMEOUT_FETCH: process.env.TIMEOUT_FETCH ?? '60', - MAX_RETRY: process.env.MAX_RETRY ?? '10', - VALIDATION_BUCKET: (validationBucket != null) ? validationBucket.bucketName : '' + VALIDATION_BUCKET: (validationBucket != null) ? 
validationBucket.bucketName : '',
+      ...optionalEnv([
+        'IPFS_API_URL',
+        'BATCH_SIZE',
+        'MAX_CAR_BYTES',
+        'FETCH_TIMEOUT_MS',
+        'FETCH_CHUNK_TIMEOUT_MS'
+      ])
     },
     queue: basicApi.queue.cdk.queue,
     enableExecuteCommand: true,
@@ -283,3 +286,17 @@ function createVPCGateways (vpc: ec2.IVpc): void {
     throw new Error('Can\'t add gateway to undefined VPC')
   }
 }
+
+/**
+ * Create an env object for an sst construct from the given keys,
+ * using values that may be set on the current process.env.
+ * Keys that are unset are omitted.
+ */
+export function optionalEnv (keys: string[]): Record<string, string> {
+  const res: Record<string, string> = {}
+  for (const key of keys) {
+    const val = process.env[key]
+    if (val === undefined) continue
+    res[key] = val
+  }
+  return res
+}
diff --git a/test/docker.test.js b/test/docker.test.js
index b6fb904..f955f21 100644
--- a/test/docker.test.js
+++ b/test/docker.test.js
@@ -4,10 +4,6 @@ import test from 'ava'
 // builds image and starts container
 test('build', async t => {
   t.timeout(1000 * 120)
-  const SQS_QUEUE_URL = 'http://127.0.0.1'
-  const IPFS_API_URL = 'http://127.0.0.1:5001'
-  const DYNAMO_TABLE_NAME = 'test-table'
-  const DYNAMO_DB_ENDPOINT = 'http://127.0.0.1:9000'
   const img = await GenericContainer.fromDockerfile(new URL('../', import.meta.url).pathname)
     .build()
   // In case the test fails comment this and uncomment the log snippet
@@ -16,10 +12,9 @@
   await t.throwsAsync(img.start())
 
   // set all the things it needs
-  img.withEnv('IPFS_API_URL', IPFS_API_URL)
-  img.withEnv('SQS_QUEUE_URL', SQS_QUEUE_URL)
-  img.withEnv('DYNAMO_TABLE_NAME', DYNAMO_TABLE_NAME)
-  img.withEnv('DYNAMO_DB_ENDPOINT', DYNAMO_DB_ENDPOINT)
+  img.withEnv('IPFS_API_URL', 'http://127.0.0.1:5001')
+  img.withEnv('SQS_QUEUE_URL', 'http://127.0.0.1')
+  img.withEnv('VALIDATION_BUCKET', 'foo')
 
   let pickup
   try {
diff --git a/validator/package.json b/validator/package.json
index 7b8cfe2..1d523f3 100644
--- a/validator/package.json
+++ b/validator/package.json
@@ -16,12 +16,12 @@
     "@aws-sdk/lib-dynamodb": "^3.112.0",
     "@aws-sdk/lib-storage": "^3.97.0",
     "@ipld/car": "5.1.0",
-    "async-retry": "1.3.3",
     "debounce": "^1.2.1",
     "linkdex": "2.0.0",
     "multiaddr": "10.0.1",
     "multiformats": "11.0.1",
     "node-fetch": "^3.2.10",
+    "p-retry": "^5.1.2",
     "pino": "8.8.0",
     "sqs-consumer": "^5.7.0"
   },
diff --git a/validator/test/_helpers.js b/validator/test/_helpers.js
index 495bd90..f17266f 100644
--- a/validator/test/_helpers.js
+++ b/validator/test/_helpers.js
@@ -1,5 +1,5 @@
 import stream from 'node:stream'
-import retry from 'async-retry'
+import retry from 'p-retry'
 import { DynamoDBDocumentClient, GetCommand, PutCommand } from '@aws-sdk/lib-dynamodb'
 import { packToBlob } from 'ipfs-car/pack/blob'
 import { MemoryBlockStore } from 'ipfs-car/blockstore/memory'
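A closing note on the `async-retry` to `p-retry` swap in the test helpers: the happy-path call shape is identical, but early exit differs because p-retry has no `bail` callback. A sketch of both idioms, assuming p-retry v5 and a hypothetical `fetchThing`:

```js
import retry, { AbortError } from 'p-retry'

// Same shape as the old async-retry call: up to 5 retries with backoff.
const result = await retry(() => fetchThing(), { retries: 5 })

// async-retry exposed a `bail(err)` callback; with p-retry you throw AbortError.
await retry(async () => {
  const res = await fetchThing()
  if (res.fatal) throw new AbortError('not retryable, giving up') // stop retrying immediately
  return res
}, { retries: 5 })
```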