From 490341b13f7efb0d9c68a5de310d3b71ebdcc363 Mon Sep 17 00:00:00 2001 From: ronenkapelian <72082238+ronenkapelian@users.noreply.github.com> Date: Wed, 8 Mar 2023 15:52:51 +0200 Subject: [PATCH] Add features collections api (#55) * feat: multi resolution api * feat: draft version of new ROI exporting API - No tests! * chore: pr notes + finalizing implementation * fix: lint fix * fix: log level for polling * fix: first pr note about files name * fix: pr round 2 notes * fix: lint fix * fix: logging for pr notes * fix: log for callback * fix: logs * fix: improve logs for pr request * fix: log notes for existing job * fix: linter * fix: pr notes + refactoring * fix: logs on getmap api * test: integration + unit for ROI API * fix: prettier * fix: pr notes for testing * fix: pr notes for tests #2 * fix: pr notes #3 * fix: prittier --- openapi3.yaml | 241 ++++- package-lock.json | 98 +- package.json | 4 +- src/clients/callbackClient.ts | 4 +- src/clients/jobManagerWrapper.ts | 133 ++- src/common/interfaces.ts | 154 ++- src/common/utils.ts | 42 +- .../controllers/createPackageController.ts | 26 +- .../models/createPackageManager.ts | 413 +++++++- .../routes/createPackageRouter.ts | 1 + src/index.ts | 3 +- src/pollingManager.ts | 39 +- src/tasks/models/tasksManager.ts | 170 ++- .../configurations/integration/jest.config.js | 19 +- tests/configurations/unit/jest.config.js | 13 +- .../createPackage/createExportPackage.spec.ts | 323 ++++++ .../createPackage/createPackage.spec.ts | 10 +- .../helpers/createPackageSender.ts | 6 +- tests/mocks/clients/jobManagerWrapper.ts | 9 + tests/mocks/clients/packageManager.ts | 3 + tests/mocks/data.ts | 776 +++++++++++++- tests/mocks/data/mockJob.ts | 58 +- tests/unit/clients/jobManagerClient.spec.ts | 372 +++++-- tests/unit/common/utils/utils.spec.ts | 39 + .../models/createPackageModel.spec.ts | 982 ++++++++++++------ .../createPackage/models/tasksModel.spec.ts | 773 ++++++++++---- 26 files changed, 3996 insertions(+), 715 deletions(-) create mode 100644 tests/integration/createPackage/createExportPackage.spec.ts diff --git a/openapi3.yaml b/openapi3.yaml index 6c5947f..0f9bd7c 100644 --- a/openapi3.yaml +++ b/openapi3.yaml @@ -13,6 +13,7 @@ paths: - createGpkg summary: Trigger export geopackage process operationId: exportTilesToGpkg + deprecated: true requestBody: $ref: '#/components/requestBodies/ExportGetmapBody' responses: @@ -50,6 +51,50 @@ paths: application/json: schema: $ref: '#/components/schemas/error' + /create/roi: + post: + tags: + - createGpkg + summary: Trigger export geopackages based on providing Regions of interest (multi-resolution) + operationId: byRoi + requestBody: + $ref: '#/components/requestBodies/ExportByRoiBody' + responses: + '200': + description: OK + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/createGpkgJobResponse' + - $ref: '#/components/schemas/exportNaiveCacheJobResponse' + discriminator: + propertyName: response + '400': + description: Bad Request + content: + application/json: + schema: + $ref: '#/components/schemas/error' + '404': + description: Could not find layer with matched dbId + content: + application/json: + schema: + $ref: '#/components/schemas/error' + '500': + description: Internal Server Error + content: + application/json: + schema: + $ref: '#/components/schemas/internalError' + '507': + description: Insufficient Storage on disk for exporting + content: + application/json: + schema: + $ref: '#/components/schemas/error' + /taskStatus/{jobId}: get: tags: @@ -97,6 +142,13 @@ 
components: application/json: schema: $ref: '#/components/schemas/exportGetMap' + ExportByRoiBody: + description: Export to gpkg via FeatureCollection + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/exportFromFeatures' schemas: CommonResponse: type: object @@ -119,7 +171,7 @@ components: description: ID as the primary key from the Raster Catalog bbox: oneOf: - - $ref: '#/components/schemas/BBOX' + - $ref: '#/components/schemas/BBox' - $ref: '#/components/schemas/Geometry' targetResolution: type: number @@ -149,6 +201,57 @@ components: - 34.82237261707599 - 31.96426962177354 targetResolution: 0.0000429153442382812 + callbackURLs: + - http://example.getmap.com/callback + - http://example.getmap.com/callback2 + crs: EPSG:4326 + priority: 0 + exportFromFeatures: + type: object + properties: + dbId: + type: string + format: uuid + description: ID as the primary key from the Raster Catalog + roi: + $ref: '#/components/schemas/FeatureCollection' + callbackURLs: + type: array + items: + type: string + description: The callback URL to notify the process if finished + crs: + $ref: '#/components/schemas/CRS' + priority: + type: number + description: The priority of the record. Maximum priority = most urgent. + minimum: 0 + maximum: 999999999 + required: + - dbId + - callbackURLs + example: + dbId: ef03ca54-c68e-4ca8-8432-50ae5ad7a7f8 + roi: + type: FeatureCollection + features: + - type: Feature + properties: + maxResolutionDeg: 0.072 + geometry: + type: Polygon + coordinates: + - - - 34.82836896556114 + - 32.03918441418732 + - - 34.81210152170695 + - 32.03918441418732 + - - 34.81210152170695 + - 32.02539369969462 + - - 34.82836896556114 + - 32.02539369969462 + - - 34.82836896556114 + - 32.03918441418732 + callbackURLs: - http://example.getmap.com/callback - http://example.getmap.com/callback2 @@ -159,10 +262,6 @@ components: - $ref: '#/components/schemas/CommonResponse' type: object properties: - status: - type: string - enum: - - In-Progress id: type: string format: uuid @@ -197,7 +296,7 @@ components: type: string bbox: oneOf: - - $ref: '#/components/schemas/BBOX' + - $ref: '#/components/schemas/BBox' - $ref: '#/components/schemas/Geometry' targetResolution: type: number @@ -217,6 +316,34 @@ components: - targetResolution - requestId - success + exportNaiveCacheJobResponse: + allOf: + - $ref: '#/components/schemas/CommonResponse' + type: object + properties: + links: + $ref: '#/components/schemas/callbackLinks' + expirationTime: + type: string + format: date + fileSize: + type: number + recordCatalogId: + type: string + format: uuid + roi: + $ref: '#/components/schemas/FeatureCollection' + requestJobId: + type: string + format: uuid + required: + - links + - expirationTime + - fileSize + - recordCatalogId + - roi + - requestJobId + - status error: type: object required: @@ -239,7 +366,54 @@ components: description: List of supported enum: - EPSG:4326 - BBOX: + Feature: + required: + - type + - properties + - geometry + properties: + type: + type: string + enum: + - Feature + properties: + type: object + required: + - maxResolutionDeg + properties: + maxResolutionDeg: + type: number + minimum: 0.00000009 + maximum: 0.072 + format: double + description: max resolution of layer in degrees/pixel + # nullable: true + geometry: + type: object + nullable: true + oneOf: + - $ref: '#/components/schemas/GeometryCollection' + - $ref: '#/components/schemas/Geometry' + bbox: + $ref: '#/components/schemas/BBox' + FeatureCollection: + type: object + description: GeoJson 
Feature collection + required: + - type + - features + properties: + type: + type: string + enum: + - FeatureCollection + features: + type: array + items: + $ref: '#/components/schemas/Feature' + bbox: + $ref: '#/components/schemas/BBox' + BBox: type: array items: type: number @@ -265,13 +439,30 @@ components: - Failed - Expired - Aborted + GeometryCollection: + type: object + description: GeoJSon geometry collection + required: + - type + - geometries + externalDocs: + url: http://geojson.org/geojson-spec.html#geometrycollection + properties: + type: + type: string + enum: + - GeometryCollection + description: custom properties + geometries: + type: array + items: + $ref: '#/components/schemas/GeometryBase' Geometry: description: GeoJSon geometry - discriminator: - propertyName: type type: object - allOf: + oneOf: - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/MultiPolygon' Polygon: type: object description: GeoJSon geometry @@ -286,6 +477,22 @@ components: type: array items: $ref: '#/components/schemas/Point2D' + MultiPolygon: + type: object + description: GeoJSon geometry + externalDocs: + url: http://geojson.org/geojson-spec.html#id6 + allOf: + - $ref: '#/components/schemas/GeometryBase' + - properties: + coordinates: + type: array + items: + type: array + items: + type: array + items: + $ref: '#/components/schemas/Point2D' GeometryBase: type: object description: GeoJSon geometry @@ -298,7 +505,21 @@ components: type: string enum: - Polygon + - MultiPolygon description: the geometry type + callbackLinks: + type: object + description: gpkg links - the geoPackage + metadata.json + required: + - dataURI + - metadataURI + properties: + dataURI: + type: string + description: Url to download the gpkg + metadataURI: + type: string + description: Url to download the gpkg's metadata.json Point2D: type: array maxItems: 2 diff --git a/package-lock.json b/package-lock.json index 7e9f6f6..707cfe1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,7 +17,7 @@ "@map-colonies/js-logger": "^0.0.5", "@map-colonies/mc-model-types": "^14.0.1", "@map-colonies/mc-priority-queue": "^4.0.3", - "@map-colonies/mc-utils": "^1.6.1", + "@map-colonies/mc-utils": "^1.7.1", "@map-colonies/openapi-express-viewer": "^2.0.1", "@map-colonies/read-pkg": "0.0.1", "@map-colonies/telemetry": "3.1.0", @@ -36,6 +36,7 @@ "express": "^4.18.1", "express-openapi-validator": "^5.0.0", "http-status-codes": "^2.2.0", + "md5": "^2.3.0", "reflect-metadata": "^0.1.13", "swagger-ui-express": "^4.1.6", "tsyringe": "^4.7.0" @@ -54,6 +55,7 @@ "@types/jest": "^28.1.7", "@types/js-yaml": "^4.0.3", "@types/lodash": "^4.14.184", + "@types/md5": "^2.3.2", "@types/multer": "^1.4.7", "@types/supertest": "^2.0.12", "@types/swagger-ui-express": "^4.1.3", @@ -2988,9 +2990,9 @@ } }, "node_modules/@map-colonies/mc-utils": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/@map-colonies/mc-utils/-/mc-utils-1.6.1.tgz", - "integrity": "sha512-17FntiMLg5nza4XDfNNTwr8WgF8WypfJD2zZESKySDkragC77L0J+hhTNtb8uay3VpFefh3WocElklLf6ghfFQ==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@map-colonies/mc-utils/-/mc-utils-1.7.1.tgz", + "integrity": "sha512-6n3hirFeiX3kpMN1/RwXFSaMocoQZuJ5nF+15ocKfiUnpXY6TVl/1+zE7/L1pDuYxNwcwsvxZjNk0HYvcWgVcA==", "dependencies": { "@map-colonies/error-types": "^1.1.5", "@map-colonies/js-logger": "^0.0.5", @@ -5435,6 +5437,12 @@ "integrity": "sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==", "dev": true }, + 
"node_modules/@types/md5": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@types/md5/-/md5-2.3.2.tgz", + "integrity": "sha512-v+JFDu96+UYJ3/UWzB0mEglIS//MZXgRaJ4ubUPwOM0gvLc/kcQ3TWNYwENEK7/EcXGQVrW8h/XqednSjBd/Og==", + "dev": true + }, "node_modules/@types/mime": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", @@ -7006,6 +7014,14 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "node_modules/charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==", + "engines": { + "node": "*" + } + }, "node_modules/check-disk-space": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/check-disk-space/-/check-disk-space-3.3.1.tgz", @@ -7894,9 +7910,9 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "node_modules/cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", "dev": true }, "node_modules/copyfiles": { @@ -8026,6 +8042,14 @@ "node": ">= 8" } }, + "node_modules/crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==", + "engines": { + "node": "*" + } + }, "node_modules/cz-conventional-changelog": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/cz-conventional-changelog/-/cz-conventional-changelog-3.3.0.tgz", @@ -12371,6 +12395,21 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "dependencies": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + } + }, + "node_modules/md5/node_modules/is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + }, "node_modules/media-typer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", @@ -18365,9 +18404,9 @@ } }, "@map-colonies/mc-utils": { - "version": "1.6.1", - "resolved": "https://registry.npmjs.org/@map-colonies/mc-utils/-/mc-utils-1.6.1.tgz", - "integrity": "sha512-17FntiMLg5nza4XDfNNTwr8WgF8WypfJD2zZESKySDkragC77L0J+hhTNtb8uay3VpFefh3WocElklLf6ghfFQ==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@map-colonies/mc-utils/-/mc-utils-1.7.1.tgz", + "integrity": "sha512-6n3hirFeiX3kpMN1/RwXFSaMocoQZuJ5nF+15ocKfiUnpXY6TVl/1+zE7/L1pDuYxNwcwsvxZjNk0HYvcWgVcA==", "requires": { "@map-colonies/error-types": "^1.1.5", "@map-colonies/js-logger": "^0.0.5", @@ -20327,6 +20366,12 @@ "integrity": 
"sha512-BdZ5BCCvho3EIXw6wUCXHe7rS53AIDPLE+JzwgT+OsJk53oBfbSmZZ7CX4VaRoN78N+TJpFi9QPlfIVNmJYWxQ==", "dev": true }, + "@types/md5": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@types/md5/-/md5-2.3.2.tgz", + "integrity": "sha512-v+JFDu96+UYJ3/UWzB0mEglIS//MZXgRaJ4ubUPwOM0gvLc/kcQ3TWNYwENEK7/EcXGQVrW8h/XqednSjBd/Og==", + "dev": true + }, "@types/mime": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-3.0.1.tgz", @@ -21493,6 +21538,11 @@ "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", "dev": true }, + "charenc": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/charenc/-/charenc-0.0.2.tgz", + "integrity": "sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==" + }, "check-disk-space": { "version": "3.3.1", "resolved": "https://registry.npmjs.org/check-disk-space/-/check-disk-space-3.3.1.tgz", @@ -22188,9 +22238,9 @@ "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" }, "cookiejar": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.3.tgz", - "integrity": "sha512-JxbCBUdrfr6AQjOXrxoTvAMJO4HBTUIlBzslcJPAz+/KT8yk53fXun51u+RenNYvad/+Vc2DIz5o9UxlCDymFQ==", + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/cookiejar/-/cookiejar-2.1.4.tgz", + "integrity": "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw==", "dev": true }, "copyfiles": { @@ -22296,6 +22346,11 @@ "which": "^2.0.1" } }, + "crypt": { + "version": "0.0.2", + "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", + "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==" + }, "cz-conventional-changelog": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/cz-conventional-changelog/-/cz-conventional-changelog-3.3.0.tgz", @@ -25572,6 +25627,23 @@ "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", "dev": true }, + "md5": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/md5/-/md5-2.3.0.tgz", + "integrity": "sha512-T1GITYmFaKuO91vxyoQMFETst+O71VUPEU3ze5GNzDm0OWdP8v1ziTaAEPUr/3kLsY3Sftgz242A1SetQiDL7g==", + "requires": { + "charenc": "0.0.2", + "crypt": "0.0.2", + "is-buffer": "~1.1.6" + }, + "dependencies": { + "is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==" + } + } + }, "media-typer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-1.1.0.tgz", diff --git a/package.json b/package.json index 280d4ba..e937053 100644 --- a/package.json +++ b/package.json @@ -46,7 +46,7 @@ "@map-colonies/js-logger": "^0.0.5", "@map-colonies/mc-model-types": "^14.0.1", "@map-colonies/mc-priority-queue": "^4.0.3", - "@map-colonies/mc-utils": "^1.6.1", + "@map-colonies/mc-utils": "^1.7.1", "@map-colonies/openapi-express-viewer": "^2.0.1", "@map-colonies/read-pkg": "0.0.1", "@map-colonies/telemetry": "3.1.0", @@ -65,6 +65,7 @@ "express": "^4.18.1", "express-openapi-validator": "^5.0.0", "http-status-codes": "^2.2.0", + "md5": "^2.3.0", "reflect-metadata": "^0.1.13", "swagger-ui-express": "^4.1.6", "tsyringe": "^4.7.0" @@ -83,6 +84,7 @@ "@types/jest": "^28.1.7", 
"@types/js-yaml": "^4.0.3", "@types/lodash": "^4.14.184", + "@types/md5": "^2.3.2", "@types/multer": "^1.4.7", "@types/supertest": "^2.0.12", "@types/swagger-ui-express": "^4.1.3", diff --git a/src/clients/callbackClient.ts b/src/clients/callbackClient.ts index fb0cd62..b97eddf 100644 --- a/src/clients/callbackClient.ts +++ b/src/clients/callbackClient.ts @@ -2,7 +2,7 @@ import { inject, singleton } from 'tsyringe'; import { HttpClient, IHttpRetryConfig } from '@map-colonies/mc-utils'; import { Logger } from '@map-colonies/js-logger'; import { SERVICES } from '../common/constants'; -import { ICallbackData, IConfig } from '../common/interfaces'; +import { ICallbackData, ICallbackExportData, IConfig } from '../common/interfaces'; @singleton() export class CallbackClient extends HttpClient { @@ -10,7 +10,7 @@ export class CallbackClient extends HttpClient { super(logger, '', 'requestCallback', config.get('httpRetry')); } - public async send(callbackUrl: string, data: ICallbackData): Promise { + public async send(callbackUrl: string, data: ICallbackData | ICallbackExportData): Promise { this.logger.info(data, `Sending callback request to URL: "${callbackUrl}"`); await this.post(callbackUrl, data); } diff --git a/src/clients/jobManagerWrapper.ts b/src/clients/jobManagerWrapper.ts index 4fb1833..4923355 100644 --- a/src/clients/jobManagerWrapper.ts +++ b/src/clients/jobManagerWrapper.ts @@ -4,20 +4,26 @@ import { Logger } from '@map-colonies/js-logger'; import booleanEqual from '@turf/boolean-equal'; import bboxPolygon from '@turf/bbox-polygon'; import { JobManagerClient, OperationStatus } from '@map-colonies/mc-priority-queue'; -import { getUTCDate } from '@map-colonies/mc-utils'; +import { featureCollectionBooleanEqual, getUTCDate } from '@map-colonies/mc-utils'; import { SERVICES } from '../common/constants'; import { + CreateExportJobBody, CreateJobBody, + ExportVersion, ICreateJobResponse, + IJobExportParameters, IJobParameters, ITaskParameters, + IWorkerExportInput, IWorkerInput, JobDuplicationParams, + JobExportDuplicationParams, + JobExportResponse, JobResponse, TaskResponse, } from '../common/interfaces'; //this is the job manager api for find job DO NOT MODIFY -interface IFindJob { +export interface IFindJob { resourceId?: string; version?: string; isCleaned?: string; @@ -49,6 +55,9 @@ export class JobManagerWrapper extends JobManagerClient { this.jobDomain = config.get('jobManager.jobDomain'); } + /** + * @deprecated The method should not be used + */ public async create(data: IWorkerInput): Promise { const expirationDate = new Date(); expirationDate.setDate(expirationDate.getDate() + this.expirationDays); @@ -62,6 +71,7 @@ export class JobManagerWrapper extends JobManagerClient { parameters: { sanitizedBbox: data.sanitizedBbox, targetResolution: data.targetResolution, + exportVersion: ExportVersion.GETMAP, zoomLevel: data.zoomLevel, callbacks: data.callbacks, crs: data.crs, @@ -94,6 +104,55 @@ export class JobManagerWrapper extends JobManagerClient { }; } + public async createExport(data: IWorkerExportInput): Promise { + const expirationDate = new Date(); + expirationDate.setDate(expirationDate.getDate() + this.expirationDays); + + const jobParameters: IJobExportParameters = { + roi: data.roi, + callbacks: data.callbacks, + crs: data.crs, + exportVersion: ExportVersion.ROI, + fileNamesTemplates: data.fileNamesTemplates, + relativeDirectoryPath: data.relativeDirectoryPath, + gpkgEstimatedSize: data.gpkgEstimatedSize, + }; + + const createJobRequest: CreateExportJobBody = { + 
resourceId: data.cswProductId, + version: data.version, + type: this.tilesJobType, + expirationDate, + domain: this.jobDomain, + parameters: jobParameters, + internalId: data.dbId, + productType: data.productType, + productName: data.cswProductId, + priority: data.priority, + status: OperationStatus.IN_PROGRESS, + additionalIdentifiers: data.relativeDirectoryPath, + tasks: [ + { + type: this.tilesTaskType, + parameters: { + batches: data.batches, + sources: data.sources, + }, + }, + ], + }; + const res = await this.createJob(createJobRequest); + const createJobResponse: ICreateJobResponse = { + id: res.id, + taskIds: res.taskIds, + status: OperationStatus.IN_PROGRESS, + }; + return createJobResponse; + } + + /** + * @deprecated The method should not be used + */ public async findCompletedJob(jobParams: JobDuplicationParams): Promise { const queryParams: IFindJob = { resourceId: jobParams.resourceId, @@ -112,6 +171,31 @@ export class JobManagerWrapper extends JobManagerClient { return undefined; } + public async findExportJob( + status: OperationStatus, + jobParams: JobExportDuplicationParams, + shouldReturnTasks = false + ): Promise { + const queryParams: IFindJob = { + resourceId: jobParams.resourceId, + version: jobParams.version, + isCleaned: 'false', + type: this.tilesJobType, + shouldReturnTasks: shouldReturnTasks ? 'true' : 'false', + status, + }; + const jobs = await this.getExportJobs(queryParams); + if (jobs) { + const matchingJob = this.findExportJobWithMatchingParams(jobs, jobParams); + return matchingJob; + } + + return undefined; + } + + /** + * @deprecated The method should not be used + */ public async findInProgressJob(jobParams: JobDuplicationParams): Promise { const queryParams: IFindJob = { resourceId: jobParams.resourceId, @@ -131,6 +215,9 @@ export class JobManagerWrapper extends JobManagerClient { return undefined; } + /** + * @deprecated The method should not be used + */ public async findPendingJob(jobParams: JobDuplicationParams): Promise { const queryParams: IFindJob = { resourceId: jobParams.resourceId, @@ -155,6 +242,9 @@ export class JobManagerWrapper extends JobManagerClient { return tasks; } + /** + * @deprecated GetMap API - will be deprecated on future + */ public async getInProgressJobs(shouldReturnTasks = false): Promise { const queryParams: IFindJob = { isCleaned: 'false', @@ -180,7 +270,7 @@ export class JobManagerWrapper extends JobManagerClient { const newExpirationDate = getUTCDate(); newExpirationDate.setDate(newExpirationDate.getDate() + this.expirationDays); - const job = await this.get(getOrUpdateURL); + const job = await this.get(getOrUpdateURL); if (job) { const oldExpirationDate = new Date(job.expirationDate as Date); if (oldExpirationDate < newExpirationDate) { @@ -195,12 +285,34 @@ export class JobManagerWrapper extends JobManagerClient { } } + public async getExportJobs(queryParams: IFindJob): Promise { + this.logger.debug({ ...queryParams }, `Getting jobs that match these parameters`); + const jobs = await this.get('/jobs', queryParams as unknown as Record); + const exportJobs = jobs?.filter((job) => { + if (job.parameters.exportVersion === ExportVersion.ROI) { + return job; + } + }); + return exportJobs; + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ private async getJobs(queryParams: IFindJob): Promise { - this.logger.debug(queryParams, 'Getting jobs that match these parameters'); + this.logger.debug({ ...queryParams }, `Getting jobs that match these parameters`); const jobs = await this.get('/jobs', 
queryParams as unknown as Record); - return jobs; + const exportJobs = jobs?.filter((job) => { + if (job.parameters.exportVersion === ExportVersion.GETMAP) { + return job; + } + }); + return exportJobs; } + /** + * @deprecated GetMap API - will be deprecated on future + */ private findJobWithMatchingParams(jobs: JobResponse[], jobParams: JobDuplicationParams): JobResponse | undefined { const matchingJob = jobs.find( (job) => @@ -212,4 +324,15 @@ export class JobManagerWrapper extends JobManagerClient { ); return matchingJob; } + + private findExportJobWithMatchingParams(jobs: JobExportResponse[], jobParams: JobExportDuplicationParams): JobExportResponse | undefined { + const matchingJob = jobs.find( + (job) => + job.internalId === jobParams.dbId && + job.version === jobParams.version && + job.parameters.crs === jobParams.crs && + featureCollectionBooleanEqual(job.parameters.roi, jobParams.roi) + ); + return matchingJob; + } } diff --git a/src/common/interfaces.ts b/src/common/interfaces.ts index 5430d83..daa9e79 100644 --- a/src/common/interfaces.ts +++ b/src/common/interfaces.ts @@ -1,4 +1,4 @@ -import { MultiPolygon, Polygon, BBox } from '@turf/turf'; +import { MultiPolygon, Polygon, BBox, FeatureCollection, Geometry } from '@turf/turf'; import { ICreateJobBody, IJobResponse, ITaskResponse, OperationStatus } from '@map-colonies/mc-priority-queue'; import { ITileRange } from '@map-colonies/mc-utils'; @@ -14,30 +14,46 @@ export interface OpenApiConfig { uiPath: string; } -export interface ICreatePackage { +export interface IBaseCreatePackage { dbId: string; - targetResolution?: number; - crs?: string; callbackURLs: string[]; - bbox?: BBox | Polygon; + crs?: string; priority?: number; } -export interface ICallbackTarget { +/** + * @deprecated GetMap API - will be deprecated on future + */ +export interface ICreatePackage extends IBaseCreatePackage { + targetResolution?: number; + bbox?: BBox | Polygon | MultiPolygon; +} + +export interface ICreatePackageRoi extends IBaseCreatePackage { + roi?: FeatureCollection; +} + +export interface ICallbackBase { url: string; +} + +/** + * @deprecated GetMap API - will be deprecated on future + */ +export interface ICallbackTarget extends ICallbackBase { bbox: BBox | Polygon; } -export interface IWorkerInput { +export interface ICallbackTargetExport extends ICallbackBase { + roi: FeatureCollection; +} + +export interface IWorkerInputBase { dbId: string; - targetResolution: number; - fileName: string; relativeDirectoryPath: string; + exportVersion: ExportVersion; priority?: number; - callbacks: ICallbackTarget[]; crs: string; - sanitizedBbox: BBox; - zoomLevel: number; version: string; cswProductId: string; productType: string; @@ -46,6 +62,23 @@ export interface IWorkerInput { gpkgEstimatedSize?: number; } +/** + * @deprecated GetMap API - will be deprecated on future + */ +export interface IWorkerInput extends IWorkerInputBase { + targetResolution: number; + fileName: string; + callbacks: ICallbackTarget[]; + sanitizedBbox: BBox; + zoomLevel: number; +} + +export interface IWorkerExportInput extends IWorkerInputBase { + callbacks: ICallbackTargetExport[]; + roi: FeatureCollection; + fileNamesTemplates: ILinkDefinition; +} + export interface IBasicResponse { message: string; } @@ -56,6 +89,9 @@ export interface ICreateJobResponse { status: OperationStatus.IN_PROGRESS | OperationStatus.COMPLETED; } +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface ICallbackDataBase { fileUri: string; expirationTime: Date; @@ -68,14 
+104,45 @@ export interface ICallbackDataBase { errorReason?: string; } +export interface ICallbackDataExportBase { + links: ILinkDefinition; + expirationTime: Date; + fileSize: number; + recordCatalogId: string; + requestJobId: string; + errorReason?: string; +} + +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface ICallbackData extends ICallbackDataBase { - bbox: BBox | Polygon; + bbox: BBox | Polygon | MultiPolygon; +} + +export interface ICallbackExportData extends ICallbackDataExportBase { + roi: FeatureCollection; } +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface ICallbackResposne extends ICallbackData { status: OperationStatus.IN_PROGRESS | OperationStatus.COMPLETED; } +export interface ILinkDefinition { + dataURI: string; + metadataURI: string; +} + +export interface ICallbackExportResponse extends ICallbackExportData { + status: OperationStatus.IN_PROGRESS | OperationStatus.COMPLETED | OperationStatus.FAILED; +} + +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface JobDuplicationParams { resourceId: string; version: string; @@ -85,10 +152,22 @@ export interface JobDuplicationParams { sanitizedBbox: BBox; } +export interface JobExportDuplicationParams { + resourceId: string; + version: string; + dbId: string; + crs: string; + roi: FeatureCollection; +} + +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface IJobParameters { targetResolution: number; relativeDirectoryPath: string; crs: string; + exportVersion: ExportVersion; callbacks: ICallbackTarget[]; sanitizedBbox: BBox; zoomLevel: number; @@ -97,6 +176,17 @@ export interface IJobParameters { gpkgEstimatedSize?: number; } +export interface IJobExportParameters { + relativeDirectoryPath: string; + crs: string; + exportVersion: ExportVersion; + roi: FeatureCollection; + callbacks: ICallbackTargetExport[]; + callbackParams?: ICallbackExportResponse; + fileNamesTemplates: ILinkDefinition; + gpkgEstimatedSize?: number; +} + export declare type MergerSourceType = 'S3' | 'GPKG' | 'FS'; export interface IMapSource { @@ -114,6 +204,9 @@ export interface ITaskParameters { sources: IMapSource[]; } +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface IInput { jobId: string; footprint?: Polygon | MultiPolygon; @@ -124,11 +217,19 @@ export interface IInput { dbId: string; } +/** + * @deprecated GetMap API - will be deprecated on future + */ export interface IJobStatusResponse { completedJobs: JobResponse[] | undefined; failedJobs: JobResponse[] | undefined; } +export interface IExportJobStatusResponse { + completedJobs: JobExportResponse[] | undefined; + failedJobs: JobExportResponse[] | undefined; +} + export interface IStorageStatusResponse { free: number; size: number; @@ -141,6 +242,33 @@ export interface IStorageEstimation { validateStorageSize: boolean; } +export interface IGeometryRecordBase { + zoomLevel: number; + sanitizedBox?: BBox | null | undefined; +} + +export interface IGeometryRecord extends IGeometryRecordBase { + geometry?: Geometry; + targetResolutionDeg: number; + targetResolutionMeter: number; +} + +// todo - Temporary enum to define old\new api - will be removed after deleting getMap API +export enum ExportVersion { + GETMAP = 'GETMAP', + ROI = 'ROI', +} + +/** + * @deprecated GetMap API - will be deprecated on future + */ export type JobResponse = IJobResponse; export type TaskResponse = ITaskResponse; +/** + * @deprecated GetMap API - will be deprecated 
on future + */ export type CreateJobBody = ICreateJobBody; + +// new API based on multi resolution +export type JobExportResponse = IJobResponse; +export type CreateExportJobBody = ICreateJobBody; diff --git a/src/common/utils.ts b/src/common/utils.ts index 1690578..87ceea1 100644 --- a/src/common/utils.ts +++ b/src/common/utils.ts @@ -1,9 +1,13 @@ +/* eslint-disable @typescript-eslint/naming-convention */ + import { promises as fsPromise } from 'fs'; import { parse as parsePath } from 'path'; import { sep } from 'path'; import checkDiskSpace from 'check-disk-space'; -import { ITileRange } from '@map-colonies/mc-utils'; -import { IStorageStatusResponse } from './interfaces'; +import { degreesPerPixelToZoomLevel, ITileRange, zoomLevelToResolutionMeter } from '@map-colonies/mc-utils'; +import { FeatureCollection, Geometry } from '@turf/helpers'; +import md5 from 'md5'; +import { IGeometryRecord, IStorageStatusResponse } from './interfaces'; export const getFileSize = async (filePath: string): Promise => { const fileSizeInBytes = (await fsPromise.stat(filePath)).size; @@ -26,6 +30,11 @@ export const getGpkgFullPath = (gpkgsLocation: string, packageName: string, sepa return packageFullPath; }; +export const concatFsPaths = (..._dirs: string[]): string => { + const fullPath: string = _dirs.join(sep); + return fullPath; +}; + export const getStorageStatus = async (gpkgsLocation: string): Promise => { return checkDiskSpace(gpkgsLocation); }; @@ -41,3 +50,32 @@ export const calculateEstimateGpkgSize = (batches: ITileRange[], tileEstimatedSi const gpkgEstimatedSize = totalTilesCount * tileEstimatedSize; return gpkgEstimatedSize; }; + +/** + * generated unique hashed string value for FeatureCollection geography - notice! features order influence on hashing + * @param geo FeatureCollection object + * @returns md5 hashed string + */ +export const generateGeoIdentifier = (geo: FeatureCollection): string => { + const stringifiedGeo = JSON.stringify(geo); + const additionalIdentifiers = md5(stringifiedGeo); + return additionalIdentifiers; +}; + +export const parseFeatureCollection = (featuresCollection: FeatureCollection): IGeometryRecord[] => { + const parsedGeoRecord: IGeometryRecord[] = []; + featuresCollection.features.forEach((feature) => { + if (feature.properties && (feature.properties.maxResolutionDeg as number)) { + const targetResolutionDeg = feature.properties.maxResolutionDeg as number; + const zoomLevel = degreesPerPixelToZoomLevel(targetResolutionDeg); + const targetResolutionMeter = zoomLevelToResolutionMeter(zoomLevel) as number; + parsedGeoRecord.push({ + geometry: feature.geometry as Geometry, + targetResolutionDeg, + targetResolutionMeter, + zoomLevel, + }); + } + }); + return parsedGeoRecord; +}; diff --git a/src/createPackage/controllers/createPackageController.ts b/src/createPackage/controllers/createPackageController.ts index 46d9420..94d3a37 100644 --- a/src/createPackage/controllers/createPackageController.ts +++ b/src/createPackage/controllers/createPackageController.ts @@ -5,9 +5,20 @@ import httpStatus from 'http-status-codes'; import { injectable, inject } from 'tsyringe'; import { SERVICES } from '../../common/constants'; import { CreatePackageManager } from '../models/createPackageManager'; -import { IBasicResponse, ICreatePackage, ICreateJobResponse, ICallbackResposne } from '../../common/interfaces'; +import { + IBasicResponse, + ICreatePackage, + ICreateJobResponse, + ICallbackResposne, + ICreatePackageRoi, + ICallbackExportResponse, +} from '../../common/interfaces'; 
-type CreatePackageHandler = RequestHandler; +type CreatePackageHandler = RequestHandler< + undefined, + IBasicResponse | ICreateJobResponse | ICallbackResposne | ICallbackExportResponse, + ICreatePackage | ICreatePackageRoi +>; @injectable() export class CreatePackageController { @@ -27,4 +38,15 @@ export class CreatePackageController { next(err); } }; + + public createPackageRoi: CreatePackageHandler = async (req, res, next) => { + const userInput: ICreatePackageRoi = req.body; + try { + this.logger.debug(userInput, `Creating package with user input`); + const jobCreated = await this.manager.createPackageRoi(userInput); + return res.status(httpStatus.OK).json(jobCreated); + } catch (err) { + next(err); + } + }; } diff --git a/src/createPackage/models/createPackageManager.ts b/src/createPackage/models/createPackageManager.ts index f1c137f..1345c98 100644 --- a/src/createPackage/models/createPackageManager.ts +++ b/src/createPackage/models/createPackageManager.ts @@ -7,21 +7,45 @@ import { BBox, bbox as PolygonBbox, intersect, + combine as featureCombine, bboxPolygon, FeatureCollection, Feature, + Geometry, featureCollection as createFeatureCollection, } from '@turf/turf'; import { inject, injectable } from 'tsyringe'; -import { degreesPerPixelToZoomLevel, ITileRange, snapBBoxToTileGrid, TileRanger } from '@map-colonies/mc-utils'; +import { degreesPerPixelToZoomLevel, featureCollectionBooleanEqual, ITileRange, snapBBoxToTileGrid, TileRanger } from '@map-colonies/mc-utils'; import { IJobResponse, OperationStatus } from '@map-colonies/mc-priority-queue'; import { BadRequestError, InsufficientStorage } from '@map-colonies/error-types'; import { isArray, isEmpty } from 'lodash'; import booleanEqual from '@turf/boolean-equal'; import { BBox2d } from '@turf/helpers/dist/js/lib/geojson'; import { ProductType, TileOutputFormat } from '@map-colonies/mc-model-types'; -import { IConfig, IStorageEstimation } from '../../../src/common/interfaces'; -import { calculateEstimateGpkgSize, getGpkgRelativePath, getStorageStatus, getGpkgNameWithoutExt } from '../../common/utils'; +import { feature, featureCollection } from '@turf/helpers'; +import { + ExportVersion, + ICallbackExportResponse, + ICallbackTargetExport, + IConfig, + ICreatePackageRoi, + IGeometryRecord, + IJobExportParameters, + ILinkDefinition, + IStorageEstimation, + IWorkerExportInput, + JobExportDuplicationParams, + JobExportResponse, +} from '../../common/interfaces'; +import { + calculateEstimateGpkgSize, + getGpkgRelativePath, + getStorageStatus, + getGpkgNameWithoutExt, + concatFsPaths, + parseFeatureCollection, + generateGeoIdentifier, +} from '../../common/utils'; import { RasterCatalogManagerClient } from '../../clients/rasterCatalogManagerClient'; import { DEFAULT_CRS, DEFAULT_PRIORITY, METADA_JSON_FILE_EXTENSION as METADATA_JSON_FILE_EXTENSION, SERVICES } from '../../common/constants'; import { @@ -69,6 +93,9 @@ export class CreatePackageManager { this.tilesProvider = this.tilesProvider.toUpperCase() as MergerSourceType; } + /** + * @deprecated GetMap API - will be deprecated on future + */ public async createPackage(userInput: ICreatePackage): Promise { const layer = await this.rasterCatalogManager.findLayer(userInput.dbId); const layerMetadata = layer.metadata; @@ -119,7 +146,7 @@ export class CreatePackageManager { const batches = this.generateTileGroups(polygon as Polygon, layerMetadata.footprint as Polygon | MultiPolygon, zoomLevel); const estimatesGpkgSize = calculateEstimateGpkgSize(batches, tileEstimatedSize); // size of 
requested gpkg export if (this.storageEstimation.validateStorageSize) { - const isEnoughStorage = await this.validateFreeSpace(estimatesGpkgSize); // todo - on current stage, the calculation estimated by jpeg sizes + const isEnoughStorage = await this.validateFreeSpace(estimatesGpkgSize); if (!isEnoughStorage) { throw new InsufficientStorage(`There isn't enough free disk space to executing export`); } } @@ -151,11 +178,12 @@ export class CreatePackageManager { relativeDirectoryPath: getGpkgNameWithoutExt(packageName), zoomLevel, dbId, + exportVersion: ExportVersion.GETMAP, version: version, cswProductId: resourceId, crs: crs ?? DEFAULT_CRS, productType, - batches: batches, + batches, sources, priority: priority ?? DEFAULT_PRIORITY, callbacks: callbacks, @@ -165,8 +193,164 @@ export class CreatePackageManager { return jobCreated; } + public async createPackageRoi(userInput: ICreatePackageRoi): Promise { + const { dbId, crs, priority, callbackURLs } = userInput; + let roi = userInput.roi; + const layer = await this.rasterCatalogManager.findLayer(userInput.dbId); + const layerMetadata = layer.metadata; + if (!roi) { + // convert and wrap layer's footprint to featureCollection + const layerMaxResolutionDeg = layerMetadata.maxResolutionDeg; + const layerFeature = feature(layerMetadata.footprint as Geometry, { maxResolutionDeg: layerMaxResolutionDeg }); + roi = featureCollection([layerFeature]); + this.logger.info({ + catalogId: userInput.dbId, + productId: layerMetadata.productId, + productVersion: layerMetadata.productVersion, + productType: layerMetadata.productType, + callbackURLs, + msg: `ROI not provided, will use default layer's geometry`, + }); + } + + let { productId: resourceId, productVersion: version, productType, maxResolutionDeg: srcRes } = layerMetadata; + const featuresRecords = parseFeatureCollection(roi); + const tileEstimatedSize = this.getTileEstimatedSize(layerMetadata.tileOutputFormat as TileOutputFormat); + + resourceId = resourceId as string; + version = version as string; + productType = productType as ProductType; + srcRes = srcRes as number; + const maxZoom = degreesPerPixelToZoomLevel(srcRes); + + // ROI vs layer validation section - zoom + geo intersection + featuresRecords.forEach((record) => { + if (record.zoomLevel > maxZoom) { + throw new BadRequestError( + `The requested resolution ${record.targetResolutionDeg} is larger than the product resolution ${srcRes as number}` + ); + } + // generate sanitized bbox for each original feature + record.sanitizedBox = this.sanitizeBbox( + record.geometry as Polygon | MultiPolygon, + layerMetadata.footprint as Polygon | MultiPolygon, + record.zoomLevel + ); + if (!record.sanitizedBox) { + throw new BadRequestError( + `Requested ${JSON.stringify(record.geometry as Polygon | MultiPolygon)} has no intersection with requested layer ${ layer.metadata.id as string }` + ); + } + }); + + const layerBbox = PolygonBbox(roi); // bounding box of entire ROI + const dupParams: JobExportDuplicationParams = { + resourceId, + version, + dbId, + roi, + crs: crs ??
DEFAULT_CRS, + }; + + const callbacks = callbackURLs.map((url) => ({ url, roi })); + const duplicationExist = await this.checkForExportDuplicate(dupParams, callbacks); + if (duplicationExist && duplicationExist.status === OperationStatus.COMPLETED) { + const callbackParam = duplicationExist as ICallbackExportResponse; + this.logger.info({ + jobStatus: callbackParam.status, + jobId: callbackParam.requestJobId, + catalogId: callbackParam.recordCatalogId, + msg: `Found relevant cache for export request`, + }); + return duplicationExist; + } else if (duplicationExist) { + const jobResponse = duplicationExist as ICreateJobResponse; + this.logger.info({ jobId: jobResponse.id, status: jobResponse.status, msg: `Found existing relevant In-Progress job for export request` }); + return duplicationExist; + } + + const batches: ITileRange[] = []; + featuresRecords.forEach((record) => { + const recordBatches = this.generateTileGroups( + record.geometry as Polygon | MultiPolygon, + layerMetadata.footprint as Polygon | MultiPolygon, + record.zoomLevel + ); + batches.push(...recordBatches); + }); + const estimatesGpkgSize = calculateEstimateGpkgSize(batches, tileEstimatedSize); // size of requested gpkg export + if (this.storageEstimation.validateStorageSize) { + const isEnoughStorage = await this.validateFreeSpace(estimatesGpkgSize); + if (!isEnoughStorage) { + const message = `There isn't enough free disk space to execute the export`; + this.logger.error({ + resourceId, + version, + dbId, + estimatesGpkgSize, + minZoomLevel: Math.min(...batches.map((batch) => batch.zoom)), + maxZoomLevel: Math.max(...batches.map((batch) => batch.zoom)), + msg: message, + }); + throw new InsufficientStorage(message); + } + } + const separator = this.getSeparator(); + const prefixPackageName = this.generateExportFileNames(productType, resourceId, version, featuresRecords); + const packageName = `${prefixPackageName}.gpkg`; + const metadataFileName = `${prefixPackageName}.json`; + const fileNamesTemplates: ILinkDefinition = { + dataURI: packageName, + metadataURI: metadataFileName, + }; + const additionalIdentifiers = generateGeoIdentifier(roi); + const packageRelativePath = `${additionalIdentifiers}${separator}${packageName}`; + const sources: IMapSource[] = [ + { + path: packageRelativePath, + type: 'GPKG', + extent: { + minX: layerBbox[0], + minY: layerBbox[1], + maxX: layerBbox[2], + maxY: layerBbox[3], + }, + }, + { + path: `${layerMetadata.id as string}${separator}${layerMetadata.displayPath as string}`, //tiles path + type: this.tilesProvider, + }, + ]; + const workerInput: IWorkerExportInput = { + roi, + fileNamesTemplates: fileNamesTemplates, + relativeDirectoryPath: additionalIdentifiers, + dbId, + exportVersion: ExportVersion.ROI, + version: version, + cswProductId: resourceId, + crs: crs ?? DEFAULT_CRS, + productType, + batches, + sources, + priority: priority ??
DEFAULT_PRIORITY, + callbacks: callbacks, + gpkgEstimatedSize: estimatesGpkgSize, + }; + const jobCreated = await this.jobManagerClient.createExport(workerInput); + return jobCreated; + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ public async createJsonMetadata(fullGpkgPath: string, job: JobResponse): Promise { - this.logger.info(`Creating metadata.json file for gpkg in path "${fullGpkgPath}" for jobId ${job.id}`); + this.logger.info({ + jobId: job.id, + msg: `Creating metadata.json file for gpkg in path "${this.gpkgsLocation}/${fullGpkgPath}" for jobId ${job.id}`, + }); const record = await this.rasterCatalogManager.findLayer(job.internalId as string); const parsedPath = parsePath(fullGpkgPath); @@ -186,6 +370,68 @@ export class CreatePackageManager { await fsPromise.writeFile(metadataFilePath, recordMetadata); } + public async createExportJsonMetadata(job: JobExportResponse): Promise { + this.logger.info({ + jobId: job.id, + metadataRelativeDirectory: job.parameters.relativeDirectoryPath, + fileName: job.parameters.fileNamesTemplates.metadataURI, + msg: `Creating metadata file`, + }); + const record = await this.rasterCatalogManager.findLayer(job.internalId as string); + const featuresRecords = parseFeatureCollection(job.parameters.roi); + + const metadataFileName = job.parameters.fileNamesTemplates.metadataURI; + const directoryName = job.parameters.relativeDirectoryPath; + const metadataFullPath = concatFsPaths(this.gpkgsLocation, directoryName, metadataFileName); + const combinedFootprint = this.getExportedPackageFootprint( + job.parameters.roi.features as Feature[], + record.metadata.footprint as Polygon | MultiPolygon, + job.id + ); + record.metadata.footprint = combinedFootprint ? combinedFootprint : record.metadata.footprint; + const maxResolutionDeg = Math.max( + record.metadata.maxResolutionDeg as number, + Math.min(...featuresRecords.map((records) => records.targetResolutionDeg)) + ); + record.metadata.maxResolutionDeg = maxResolutionDeg; + const maxResolutionMeter = Math.max( + record.metadata.maxResolutionMeter as number, + Math.min(...featuresRecords.map((records) => records.targetResolutionMeter)) + ); + record.metadata.maxResolutionMeter = maxResolutionMeter; + + const layerPolygonPartFeatures = this.getExportedPackageLayerPolygonParts( + featuresRecords, + record.metadata.layerPolygonParts as FeatureCollection, + job.id + ); + const roiBbox = PolygonBbox(job.parameters.roi); + (record.metadata.layerPolygonParts as FeatureCollection) = { + ...(record.metadata.layerPolygonParts as FeatureCollection), + features: layerPolygonPartFeatures, + bbox: roiBbox, + }; + record.metadata.productBoundingBox = roiBbox.join(','); + + this.logger.debug({ ...record.metadata, metadataFullPath, jobId: job.id, msg: 'Metadata json file will be written to file' }); + const recordMetadata = JSON.stringify(record.metadata); + await fsPromise.writeFile(metadataFullPath, recordMetadata); + } + + private featuresFootprintIntersects( + features: Feature[], + footprint: Polygon | MultiPolygon + ): Feature[] { + const intersectedFeatures: Feature[] = []; + features.forEach((feature) => { + const intersected = intersect(feature, footprint); + if (intersected !== null) { + intersectedFeatures.push(intersected); + } + }); + return intersectedFeatures; + } + private async getFreeStorage(): Promise { const storageStatus: IStorageStatusResponse = await getStorageStatus(this.gpkgsLocation); let otherRunningJobsSize = 0; @@ -216,24 +462,24 @@ export class CreatePackageManager { 
return this.tilesProvider === 'S3' ? '/' : sep; } - private normalize2Polygon(bboxFromUser: Polygon | BBox | undefined): Polygon | undefined { + private normalize2Polygon(bboxFromUser: Polygon | MultiPolygon | BBox | undefined): Polygon | undefined { try { if (isArray(bboxFromUser) && bboxFromUser.length === CreatePackageManager.bboxLength2d) { - this.logger.debug(bboxFromUser, `Export will be executed by provided BBox from request input`); + this.logger.debug({ ...bboxFromUser, msg: `Export will be executed by provided BBox from request input` }); const resultPolygon = bboxPolygon(bboxFromUser as BBox); return resultPolygon.geometry; } else if (this.isAPolygon(bboxFromUser)) { - this.logger.debug(bboxFromUser, `Export will be executed by provided Footprint from request input`); + this.logger.debug({ ...bboxFromUser, msg: `Export will be executed by provided Footprint from request input` }); return bboxFromUser; } else if (!bboxFromUser) { this.logger.debug(`Export will be executed on entire layer's footprint`); return undefined; } else { - this.logger.warn(bboxFromUser, `Input bbox param illegal - should be bbox | polygon | null types`); + this.logger.warn({ ...bboxFromUser, msg: `Input bbox param illegal - should be bbox | polygon | null types` }); throw new BadRequestError('Input bbox param illegal - should be bbox | polygon | null types'); } } catch (error) { - this.logger.error(bboxFromUser, `Failed`); + this.logger.error({ bboxFromUser, msg: `Failed with error ${(error as Error).message}` }); throw new BadRequestError('Input bbox param illegal - should be bbox | polygon | null types'); } } @@ -253,7 +499,7 @@ export class CreatePackageManager { return isPolygon; } - private sanitizeBbox(polygon: Polygon, footprint: Polygon | MultiPolygon, zoom: number): BBox | null { + private sanitizeBbox(polygon: Polygon | MultiPolygon, footprint: Polygon | MultiPolygon, zoom: number): BBox | null { try { const intersaction = intersect(polygon, footprint); if (intersaction === null) { @@ -266,7 +512,7 @@ export class CreatePackageManager { } } - private generateTileGroups(polygon: Polygon, footprint: Polygon | MultiPolygon, zoom: number): ITileRange[] { + private generateTileGroups(polygon: Polygon | MultiPolygon, footprint: Polygon | MultiPolygon, zoom: number): ITileRange[] { let intersaction: Feature | null; try { @@ -310,6 +556,9 @@ export class CreatePackageManager { } } + /** + * @deprecated GetMap API - will be deprecated on future + */ private async checkForDuplicate( dupParams: JobDuplicationParams, callbackUrls: ICallbackTarget[] @@ -332,6 +581,31 @@ export class CreatePackageManager { return undefined; } + private async checkForExportDuplicate( + dupParams: JobExportDuplicationParams, + callbackUrls: ICallbackTargetExport[] + ): Promise { + let completedExists = await this.checkForExportCompleted(dupParams); + if (completedExists) { + return completedExists; + } + + const processingExists = await this.checkForExportProcessing(dupParams, callbackUrls); + if (processingExists) { + // For race condition + completedExists = await this.checkForExportCompleted(dupParams); + if (completedExists) { + return completedExists; + } + return processingExists; + } + + return undefined; + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ private async checkForCompleted(dupParams: JobDuplicationParams): Promise { this.logger.info(dupParams, `Checking for COMPLETED duplications with parameters`); const responseJob = await this.jobManagerClient.findCompletedJob(dupParams); @@ -344,6 
+618,21 @@ export class CreatePackageManager { } } + private async checkForExportCompleted(dupParams: JobExportDuplicationParams): Promise { + this.logger.info({ ...dupParams, roi: undefined, msg: `Checking for COMPLETED duplications with parameters` }); + const responseJob = await this.jobManagerClient.findExportJob(OperationStatus.COMPLETED, dupParams); + if (responseJob) { + await this.jobManagerClient.validateAndUpdateExpiration(responseJob.id); + return { + ...responseJob.parameters.callbackParams, + status: OperationStatus.COMPLETED, + } as ICallbackExportResponse; + } + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ private async checkForProcessing(dupParams: JobDuplicationParams, newCallbacks: ICallbackTarget[]): Promise { this.logger.info(dupParams, `Checking for PROCESSING duplications with parameters`); const processingJob = (await this.jobManagerClient.findInProgressJob(dupParams)) ?? (await this.jobManagerClient.findPendingJob(dupParams)); @@ -358,6 +647,28 @@ export class CreatePackageManager { } } + private async checkForExportProcessing( + dupParams: JobExportDuplicationParams, + newCallbacks: ICallbackTargetExport[] + ): Promise { + this.logger.info({ ...dupParams, roi: undefined, msg: `Checking for PROCESSING duplications with parameters` }); + const processingJob = + (await this.jobManagerClient.findExportJob(OperationStatus.IN_PROGRESS, dupParams, true)) ?? + (await this.jobManagerClient.findExportJob(OperationStatus.PENDING, dupParams, true)); + if (processingJob) { + await this.updateExportCallbackURLs(processingJob, newCallbacks); + await this.jobManagerClient.validateAndUpdateExpiration(processingJob.id); + return { + id: processingJob.id, + taskIds: (processingJob.tasks as unknown as IJobResponse[]).map((t) => t.id), + status: OperationStatus.IN_PROGRESS, + }; + } + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ private async updateCallbackURLs(processingJob: JobResponse, newCallbacks: ICallbackTarget[]): Promise { const callbacks = processingJob.parameters.callbacks; for (const newCallback of newCallbacks) { @@ -393,6 +704,31 @@ export class CreatePackageManager { }); } + private async updateExportCallbackURLs(processingJob: JobExportResponse, newCallbacks: ICallbackTargetExport[]): Promise { + const callbacks = processingJob.parameters.callbacks; + for (const newCallback of newCallbacks) { + const hasCallback = callbacks.findIndex((callback) => { + const exist = callback.url === newCallback.url; + if (!exist) { + return false; + } + + const sameROI = featureCollectionBooleanEqual(callback.roi, newCallback.roi); + return sameROI; + }); + // eslint-disable-next-line @typescript-eslint/no-magic-numbers + if (hasCallback === -1) { + callbacks.push(newCallback); + } + } + await this.jobManagerClient.updateJob(processingJob.id, { + parameters: processingJob.parameters, + }); + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ private generatePackageName(productType: string, productId: string, productVersion: string, zoomLevel: number, bbox: BBox): string { const numberOfDecimals = 5; const bboxToString = bbox.map((val) => String(val.toFixed(numberOfDecimals)).replace('.', '_').replace(/-/g, 'm')).join(''); @@ -400,6 +736,13 @@ export class CreatePackageManager { return `${productType}_${productId}_${productVersionConvention}_${zoomLevel}_${bboxToString}.gpkg`; } + private generateExportFileNames(productType: string, productId: string, productVersion: string, featuresRecords: IGeometryRecord[]): 
string { + const maxZoom = Math.max(...featuresRecords.map((feature) => feature.zoomLevel)); + let currentDateStr = new Date().toJSON(); + currentDateStr = `${currentDateStr}`.replaceAll('-', '_').replaceAll('.', '_').replaceAll(':', '_'); + return `${productType}_${productId}_${productVersion.replaceAll('.', '_')}_${maxZoom}_${currentDateStr}`; + } + private extractPolygonParts(layerPolygonParts: FeatureCollection, sanitizedBboxPolygonzied: Feature): FeatureCollection { this.logger.debug(`Extracting layerPolygonParts from original record that intersects with sanitized bbox`); const newFeatures: Feature[] = []; @@ -412,9 +755,9 @@ export class CreatePackageManager { } }); - const newPolygonLarts = createFeatureCollection(newFeatures, { bbox: sanitizedBboxPolygonzied.bbox }); + const newPolygonParts = createFeatureCollection(newFeatures, { bbox: sanitizedBboxPolygonzied.bbox }); - return newPolygonLarts; + return newPolygonParts; } private getTileEstimatedSize(tileOutputFormat: TileOutputFormat): number { @@ -428,4 +771,44 @@ export class CreatePackageManager { return tileEstimatedSize; } + + // todo - add unittest + private getExportedPackageFootprint( + features: Feature[], + footprint: Polygon | MultiPolygon, + jobId: string + ): MultiPolygon | undefined { + let combinedFootprint = undefined; + try { + const intersectedFeatures = this.featuresFootprintIntersects(features, footprint); + const fc: FeatureCollection = featureCollection(intersectedFeatures); + combinedFootprint = featureCombine(fc).features[0].geometry as unknown as MultiPolygon; + } catch (error) { + this.logger.error({ jobId, msg: `Failed to match features intersection with footprint with error: ${(error as Error).message}` }); + } + return combinedFootprint; + } + + // todo - add unittest + private getExportedPackageLayerPolygonParts(featuresRecords: IGeometryRecord[], layerPolygonParts: FeatureCollection, jobId: string): Feature[] { + const layerPolygonPartFeatures: Feature[] = []; + for (const featureRecord of featuresRecords) { + for (const feature of layerPolygonParts.features) { + const intersectedFeature = intersect(featureRecord.geometry as Polygon | MultiPolygon, feature.geometry as Polygon | MultiPolygon); + if (!intersectedFeature) { + continue; + } + if (feature.properties?.Resolution !== undefined) { + const maxResolutionDeg = Math.max(featureRecord.targetResolutionDeg, feature.properties.Resolution as number); + // eslint-disable-next-line @typescript-eslint/naming-convention + intersectedFeature.properties = { ...feature.properties, Resolution: maxResolutionDeg }; + } else { + this.logger.error({ ...feature, jobId, msg: `LayerPolygonPart not include property of type 'Resolution` }); + throw new Error(`Layer's LayerPolygonPart value not include property of type 'Resolution`); + } + layerPolygonPartFeatures.push({ ...intersectedFeature }); + } + } + return layerPolygonPartFeatures; + } } diff --git a/src/createPackage/routes/createPackageRouter.ts b/src/createPackage/routes/createPackageRouter.ts index b399847..1749a1b 100644 --- a/src/createPackage/routes/createPackageRouter.ts +++ b/src/createPackage/routes/createPackageRouter.ts @@ -7,6 +7,7 @@ const createPackageRouterFactory: FactoryFunction = (dependencyContainer const controller = dependencyContainer.resolve(CreatePackageController); router.post('/', controller.create); + router.post('/roi', controller.createPackageRoi); return router; }; diff --git a/src/index.ts b/src/index.ts index 3e59cd8..c6b115d 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,7 
diff --git a/src/index.ts b/src/index.ts
index 3e59cd8..c6b115d 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -8,7 +8,6 @@ import { container } from 'tsyringe';
 import config from 'config';
 import { DEFAULT_SERVER_PORT, SERVICES } from './common/constants';
 import { PollingManager, POLLING_MANGER_SYMBOL } from './pollingManager';
-
 import { getApp } from './app';
 interface IServerConfig {
@@ -38,7 +37,7 @@ const mainPollLoop = async (): Promise<void> => {
   try {
     polledData = await pollingManager.jobStatusPoll();
   } catch (error) {
-    logger.error(error, `Main loop poll error occured`);
+    logger.error(error, `Main loop poll error occurred`);
   } finally {
     if (!polledData) {
       await new Promise((resolve) => setTimeout(resolve, pollingTimout));
diff --git a/src/pollingManager.ts b/src/pollingManager.ts
index fb69080..1050557 100644
--- a/src/pollingManager.ts
+++ b/src/pollingManager.ts
@@ -16,23 +16,48 @@ export class PollingManager {
   public async jobStatusPoll(): Promise<boolean> {
     let existsJobs = false;
-    const jobs = await this.taskManager.getJobsByTaskStatus();
+
+    const getMapJobs = await this.taskManager.getJobsByTaskStatus(); // for old getmap api - will be removed
+    const roiJobs = await this.taskManager.getExportJobsByTaskStatus(); // new api by roi
     const expirationDate = new Date();
     expirationDate.setDate(expirationDate.getDate() + this.expirationDays);
-    if (jobs.completedJobs?.length) {
+
+    this.logger.debug({ ...getMapJobs, msg: `Handling GetMap jobs` });
+    if (getMapJobs.completedJobs !== undefined && getMapJobs.completedJobs.length > 0) {
       existsJobs = true;
-      this.logger.info(`Completed jobs detected, running finalize job`);
-      for (const job of jobs.completedJobs) {
+      this.logger.debug({ msg: `GETMAP Completed jobs detected, running finalize job` });
+      for (const job of getMapJobs.completedJobs) {
+        this.logger.info({ jobId: job.id, msg: `GETMAP Execute completed job finalizing on BBOX (GetMap) exporting for job: ${job.id}` });
         await this.taskManager.finalizeJob(job, expirationDate);
       }
-    } else if (jobs.failedJobs?.length) {
+    } else if (getMapJobs.failedJobs !== undefined && getMapJobs.failedJobs.length > 0) {
       existsJobs = true;
-      this.logger.info(`Failed jobs detected, running finalize job`);
-      for (const job of jobs.failedJobs) {
+      this.logger.debug({ msg: `GETMAP Failed jobs detected, running finalize job` });
+      for (const job of getMapJobs.failedJobs) {
+        this.logger.info({ jobId: job.id, msg: `GETMAP Execute failed job finalizing on BBOX (GetMap) exporting for job: ${job.id}` });
         const gpkgFailedErr = `failed to create gpkg, job: ${job.id}`;
         await this.taskManager.finalizeJob(job, expirationDate, false, gpkgFailedErr);
       }
     }
+
+    this.logger.debug({ ...roiJobs, msg: `Handling ROI jobs` });
+    if (roiJobs.completedJobs !== undefined && roiJobs.completedJobs.length > 0) {
+      existsJobs = true;
+      this.logger.debug({ msg: `ROI Completed jobs detected, running finalize job` });
+      for (const job of roiJobs.completedJobs) {
+        this.logger.info({ jobId: job.id, msg: `Execute completed job finalizing on ROI exporting for job: ${job.id}` });
+        await this.taskManager.finalizeExportJob(job, expirationDate);
+      }
+    } else if (roiJobs.failedJobs !== undefined && roiJobs.failedJobs.length > 0) {
+      existsJobs = true;
+      this.logger.debug({ msg: `ROI Failed jobs detected, running finalize job` });
+      for (const job of roiJobs.failedJobs) {
+        this.logger.info({ jobId: job.id, msg: `Execute failed job finalizing on ROI exporting for job: ${job.id}` });
+        const gpkgFailedErr = `failed to create gpkg, job: ${job.id}`;
+        await this.taskManager.finalizeExportJob(job, expirationDate, false, gpkgFailedErr);
+      }
+    }
+
     return existsJobs;
   }
 }
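For context, both polling branches above consume the same { completedJobs, failedJobs } result shape from TasksManager (the field names appear in the code that follows; the exact typing of the optional arrays is an assumption), and the guards treat a missing list and an empty list the same way. A minimal sketch, assuming a resolved TasksManager instance named taskManager:

  const roiJobs = await taskManager.getExportJobsByTaskStatus();
  const hasCompleted = roiJobs.completedJobs !== undefined && roiJobs.completedJobs.length > 0;
  const hasFailed = roiJobs.failedJobs !== undefined && roiJobs.failedJobs.length > 0;
  // only real work flips the poller's existsJobs flag; otherwise the main loop in index.ts sleeps for the polling timeout
  const existsJobs = hasCompleted || hasFailed;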
diff --git a/src/tasks/models/tasksManager.ts b/src/tasks/models/tasksManager.ts
index 2b0178f..6c27669 100644
--- a/src/tasks/models/tasksManager.ts
+++ b/src/tasks/models/tasksManager.ts
@@ -3,10 +3,23 @@ import { inject, injectable } from 'tsyringe';
 import config from 'config';
 import { IUpdateJobBody, OperationStatus } from '@map-colonies/mc-priority-queue';
 import { NotFoundError } from '@map-colonies/error-types';
-import { getGpkgFullPath, getGpkgRelativePath } from '../../common/utils';
+import { concatFsPaths, getGpkgFullPath, getGpkgRelativePath } from '../../common/utils';
 import { SERVICES } from '../../common/constants';
-import { JobManagerWrapper } from '../../clients/jobManagerWrapper';
-import { ICallbackData, ICallbackDataBase, IJobParameters, IJobStatusResponse, JobResponse } from '../../common/interfaces';
+import { IFindJob, JobManagerWrapper } from '../../clients/jobManagerWrapper';
+import {
+  ICallbackData,
+  ICallbackDataBase,
+  ICallbackDataExportBase,
+  ICallbackExportData,
+  ICallbackExportResponse,
+  IExportJobStatusResponse,
+  IJobExportParameters,
+  IJobParameters,
+  IJobStatusResponse,
+  ILinkDefinition,
+  JobExportResponse,
+  JobResponse,
+} from '../../common/interfaces';
 import { CallbackClient } from '../../clients/callbackClient';
 import { getFileSize } from '../../common/utils';
 import { CreatePackageManager } from '../../createPackage/models/createPackageManager';
@@ -20,6 +33,7 @@ export interface ITaskStatusResponse {
 export class TasksManager {
   private readonly gpkgsLocation: string;
   private readonly downloadServerUrl: string;
+  private readonly tilesJobType: string;
   public constructor(
     @inject(SERVICES.LOGGER) private readonly logger: Logger,
     @inject(JobManagerWrapper) private readonly jobManagerClient: JobManagerWrapper,
@@ -28,11 +42,31 @@ export class TasksManager {
   ) {
     this.gpkgsLocation = config.get<string>('gpkgsLocation');
     this.downloadServerUrl = config.get<string>('downloadServerUrl');
+    this.tilesJobType = config.get<string>('workerTypes.tiles.jobType');
   }
+  /**
+   * @deprecated GetMap API - will be deprecated in the future
+   */
   public async getJobsByTaskStatus(): Promise<IJobStatusResponse> {
     const jobs = await this.jobManagerClient.getInProgressJobs();
+    const completedJobs = jobs?.filter((job) => job.completedTasks === job.taskCount);
+    const failedJobs = jobs?.filter((job) => job.failedTasks === job.taskCount);
+    const jobsStatus = {
+      completedJobs: completedJobs,
+      failedJobs: failedJobs,
+    };
+    return jobsStatus;
+  }
+  public async getExportJobsByTaskStatus(): Promise<IExportJobStatusResponse> {
+    const queryParams: IFindJob = {
+      isCleaned: 'false',
+      type: this.tilesJobType,
+      shouldReturnTasks: 'false',
+      status: OperationStatus.IN_PROGRESS,
+    };
+    const jobs = await this.jobManagerClient.getExportJobs(queryParams);
     const completedJobs = jobs?.filter((job) => job.completedTasks === job.taskCount);
     const failedJobs = jobs?.filter((job) => job.failedTasks === job.taskCount);
     const jobsStatus = {
@@ -56,6 +90,9 @@ export class TasksManager {
     return statusResponse;
   }
+  /**
+   * @deprecated GetMap API - will be deprecated in the future
+   */
   public async sendCallbacks(job: JobResponse, expirationDate: Date, errorReason?: string): Promise<ICallbackDataBase | undefined> {
     let fileUri = '';
     let fileRelativePath = '';
@@ -93,16 +130,41 @@ export class TasksManager {
       promisesResponse.forEach((response, index) => {
         if (response.status === 'rejected') {
           // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-          this.logger.error({ reason: response.reason, url: targetCallbacks[index].url }, `Failed to send callback to url`);
+          this.logger.error({ reason: response.reason, url: targetCallbacks[index].url, jobId: job.id, msg: `Failed to send callback to
url` }); } }); return callbackParams; } catch (error) { - this.logger.error(error, `Sending callback has failed`); + this.logger.error({ jobId: job.id, err: error, reason: (error as Error).message, msg: `Sending callback has failed` }); } } + public async sendExportCallbacks(job: JobExportResponse, callbackParams: ICallbackDataExportBase): Promise { + try { + this.logger.info({ jobId: job.id, callbacks: job.parameters.callbacks, msg: `Sending callback for job: ${job.id}` }); + const targetCallbacks = job.parameters.callbacks; + const callbackPromises: Promise[] = []; + for (const target of targetCallbacks) { + const params: ICallbackExportData = { ...callbackParams, roi: job.parameters.roi }; + callbackPromises.push(this.callbackClient.send(target.url, params)); + } + + const promisesResponse = await Promise.allSettled(callbackPromises); + promisesResponse.forEach((response, index) => { + if (response.status === 'rejected') { + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + this.logger.error({ reason: response.reason, url: targetCallbacks[index].url, jobId: job.id, msg: `Failed to send callback to url` }); + } + }); + } catch (error) { + this.logger.error({ err: error, callbacksUrls: job.parameters.callbacks, jobId: job.id, msg: `Sending callback has failed` }); + } + } + + /** + * @deprecated GetMap API - will be deprecated on future + */ public async finalizeJob(job: JobResponse, expirationDate: Date, isSuccess = true, reason?: string): Promise { let updateJobParams: IUpdateJobBody = { status: isSuccess ? OperationStatus.COMPLETED : OperationStatus.FAILED, @@ -112,7 +174,7 @@ export class TasksManager { expirationDate: expirationDate, }; try { - this.logger.info(`Finzaling Job: ${job.id}`); + this.logger.info({ jobId: job.id, msg: `Finalize Job` }); const packageName = job.parameters.fileName; if (isSuccess) { const packageFullPath = getGpkgFullPath(this.gpkgsLocation, packageName); @@ -121,13 +183,105 @@ export class TasksManager { const callbackParams = await this.sendCallbacks(job, expirationDate, reason); updateJobParams = { ...updateJobParams, parameters: { ...job.parameters, callbackParams } }; - this.logger.info(`Update Job status to success=${String(isSuccess)} jobId=${job.id}`); + this.logger.info({ jobId: job.id, status: isSuccess, msg: `Update Job status` }); await this.jobManagerClient.updateJob(job.id, updateJobParams); } catch (error) { - this.logger.error(`Could not finalize job: ${job.id} updating failed job status, error: ${(error as Error).message}`); + this.logger.error({ + jobId: job.id, + err: error, + errorReason: (error as Error).message, + msg: `Could not finalize job, will updating to status failed`, + }); const callbackParams = await this.sendCallbacks(job, expirationDate, reason); updateJobParams = { ...updateJobParams, status: OperationStatus.FAILED, parameters: { ...job.parameters, callbackParams } }; await this.jobManagerClient.updateJob(job.id, updateJobParams); } } + + public async finalizeExportJob(job: JobExportResponse, expirationDate: Date, isSuccess = true, reason?: string): Promise { + let updateJobParams: IUpdateJobBody = { + reason, + /* eslint-disable-next-line @typescript-eslint/no-magic-numbers */ + percentage: isSuccess ? 100 : undefined, + status: isSuccess ? 
OperationStatus.COMPLETED : OperationStatus.FAILED,
+      expirationDate: expirationDate,
+    };
+    try {
+      this.logger.info({ jobId: job.id, isSuccess, msg: `Finalize Job` });
+      if (isSuccess) {
+        await this.packageManager.createExportJsonMetadata(job);
+      }
+
+      // create and send the response to the callbacks
+      const callbackSendParams = await this.generateCallbackParam(job, expirationDate, reason);
+      await this.sendExportCallbacks(job, callbackSendParams);
+
+      // generate the job's final completion data with the webhook (callback param) data
+      let finalizeStatus = OperationStatus.COMPLETED;
+
+      if (reason !== undefined) {
+        finalizeStatus = OperationStatus.FAILED;
+      }
+
+      const callbackParams: ICallbackExportResponse = {
+        ...callbackSendParams,
+        roi: job.parameters.roi,
+        status: finalizeStatus,
+        errorReason: reason,
+      };
+
+      updateJobParams = { ...updateJobParams, parameters: { ...job.parameters, callbackParams } };
+      this.logger.info({ finalizeStatus, jobId: job.id, msg: `Updating job finalizing status` });
+    } catch (error) {
+      this.logger.error({ jobId: job.id, err: error, reason: `${(error as Error).message}`, msg: `Could not finalize job` });
+      updateJobParams = { ...updateJobParams, reason: JSON.stringify(error as Error), status: OperationStatus.FAILED };
+    } finally {
+      await this.jobManagerClient.updateJob(job.id, updateJobParams);
+    }
+  }
+
+  private async generateCallbackParam(job: JobExportResponse, expirationDate: Date, errorReason?: string): Promise<ICallbackDataExportBase> {
+    let links: ILinkDefinition = { ...job.parameters.fileNamesTemplates }; // default file names in case of failure
+    this.logger.info({ jobId: job.id, msg: `Generating callback body for job: ${job.id}` });
+
+    const packageName = job.parameters.fileNamesTemplates.dataURI;
+    const relativeFilesDirectory = job.parameters.relativeDirectoryPath;
+    const success = errorReason === undefined;
+    let fileSize = 0;
+    if (success) {
+      const packageFullPath = concatFsPaths(this.gpkgsLocation, relativeFilesDirectory, packageName);
+      // TODO - the link shouldn't be hard-coded for each of its parts;
temporary before webhooks implementation + links = { + dataURI: `${this.downloadServerUrl}/downloads/${relativeFilesDirectory}/${job.parameters.fileNamesTemplates.dataURI}`, + metadataURI: `${this.downloadServerUrl}/downloads/${relativeFilesDirectory}/${job.parameters.fileNamesTemplates.metadataURI}`, + }; + try { + fileSize = await getFileSize(packageFullPath); + } catch (error) { + this.logger.error({ + jobId: job.id, + err: error, + reason: `${(error as Error).message}`, + msg: `failed getting gpkg file size to ${packageFullPath}`, + }); + } + } + const callbackParams: ICallbackDataExportBase = { + links, + expirationTime: expirationDate, + fileSize, + recordCatalogId: job.internalId as string, + requestJobId: job.id, + errorReason, + }; + this.logger.info({ + links: callbackParams.links, + gpkgSize: callbackParams.fileSize, + catalogId: callbackParams.recordCatalogId, + jobId: job.id, + msg: `Finish generating callbackParams for job: ${job.id}`, + }); + this.logger.debug({ ...callbackParams, msg: `full callbackParam data` }); + return callbackParams; + } } diff --git a/tests/configurations/integration/jest.config.js b/tests/configurations/integration/jest.config.js index 699f447..047fe20 100644 --- a/tests/configurations/integration/jest.config.js +++ b/tests/configurations/integration/jest.config.js @@ -9,7 +9,15 @@ module.exports = { }, coverageReporters: ['text', 'html'], collectCoverage: true, - collectCoverageFrom: ['/src/**/*.ts', '!*/node_modules/', '!/vendor/**', '!*/common/**', '!**/models/**', '!/src/*'], + collectCoverageFrom: [ + '/src/**/*.ts', + '!*/node_modules/', + '!/vendor/**', + '!*/clients/*', + '!*/common/**', + '!**/models/**', + '!/src/*', + ], coverageDirectory: '/coverage', rootDir: '../../../.', testMatch: ['/tests/integration/**/*.spec.ts'], @@ -24,15 +32,14 @@ module.exports = { ], collectCoverage: true, moduleDirectories: ['node_modules', 'src'], - collectCoverageFrom: ['/src/**/*.{ts}', '!**/node_modules/**', '!**/vendor/**'], preset: 'ts-jest', testEnvironment: 'node', coverageThreshold: { global: { - branches: 80, - functions: 80, - lines: 80, - statements: -10, + branches: 100, + functions: 100, + lines: 100, + statements: 100, }, }, }; diff --git a/tests/configurations/unit/jest.config.js b/tests/configurations/unit/jest.config.js index 64da238..78293ce 100644 --- a/tests/configurations/unit/jest.config.js +++ b/tests/configurations/unit/jest.config.js @@ -14,7 +14,10 @@ module.exports = { '/src/**/*.ts', '!*/node_modules/', '!/vendor/**', - '!*/common/**', + '!*/common/dependencyRegistration.ts', + '!*/common/tracing.ts', + '!*/common/constants.ts', + '!*/clients/callbackClient.ts**', '!**/controllers/**', '!**/routes/**', '!/src/*', @@ -30,10 +33,10 @@ module.exports = { testEnvironment: 'node', coverageThreshold: { global: { - branches: 65, - functions: 74, - lines: 79, - statements: 79, + branches: 74, + functions: 91, + lines: 87, + statements: 87, }, }, }; diff --git a/tests/integration/createPackage/createExportPackage.spec.ts b/tests/integration/createPackage/createExportPackage.spec.ts new file mode 100644 index 0000000..7dba725 --- /dev/null +++ b/tests/integration/createPackage/createExportPackage.spec.ts @@ -0,0 +1,323 @@ +import httpStatusCodes from 'http-status-codes'; +import { OperationStatus } from '@map-colonies/mc-priority-queue'; +import { feature, featureCollection, Geometry } from '@turf/turf'; +import { getApp } from '../../../src/app'; +import { RasterCatalogManagerClient } from '../../../src/clients/rasterCatalogManagerClient'; 
+import { getContainerConfig, resetContainer } from '../testContainerConfig'; +import { ICreateJobResponse, ICreatePackageRoi, JobExportDuplicationParams } from '../../../src/common/interfaces'; +import { layerFromCatalog, fc1, fcNoMaxResolutionDeg, fcNoIntersection, fcTooHighResolution } from '../../mocks/data'; +import { JobManagerWrapper } from '../../../src/clients/jobManagerWrapper'; +import { CreatePackageManager } from '../../../src/createPackage/models/createPackageManager'; +import { CreatePackageSender } from './helpers/createPackageSender'; + +describe('Export by ROI', function () { + let requestSender: CreatePackageSender; + let findLayerSpy: jest.SpyInstance; + let createJobSpy: jest.SpyInstance; + let checkForExportDuplicateSpy: jest.SpyInstance; + let validateFreeSpaceSpy: jest.SpyInstance; + let checkForExportCompletedSpy: jest.SpyInstance; + let checkForExportProcessingSpy: jest.SpyInstance; + let generateTileGroupsSpy: jest.SpyInstance; + + beforeEach(function () { + const app = getApp({ + override: [...getContainerConfig()], + useChild: true, + }); + requestSender = new CreatePackageSender(app); + checkForExportDuplicateSpy = jest.spyOn( + CreatePackageManager.prototype as unknown as { checkForExportDuplicate: jest.Mock }, + 'checkForExportDuplicate' + ); + checkForExportCompletedSpy = jest.spyOn( + CreatePackageManager.prototype as unknown as { checkForExportCompleted: jest.Mock }, + 'checkForExportCompleted' + ); + checkForExportProcessingSpy = jest.spyOn( + CreatePackageManager.prototype as unknown as { checkForExportProcessing: jest.Mock }, + 'checkForExportProcessing' + ); + validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + generateTileGroupsSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { generateTileGroups: jest.Mock }, 'generateTileGroups'); + findLayerSpy = jest.spyOn(RasterCatalogManagerClient.prototype, 'findLayer'); + createJobSpy = jest.spyOn(JobManagerWrapper.prototype, 'createJob'); + }); + + afterEach(function () { + resetContainer(); + jest.resetAllMocks(); + }); + + describe('Happy Path', function () { + it('should return 200 status code and the job created details', async function () { + const body: ICreatePackageRoi = { + dbId: layerFromCatalog.id, + roi: fc1, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + }; + findLayerSpy.mockResolvedValue(layerFromCatalog); + checkForExportDuplicateSpy.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + generateTileGroupsSpy.mockReturnValue([]); + createJobSpy.mockResolvedValue({ id: 'b1c59730-c31d-4e44-9c67-4dbbb3b1c812', taskIds: ['6556896a-113c-4397-a48b-0cb2c99658f5'] }); + + const resposne = await requestSender.createPackageRoi(body); + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(1); + expect(generateTileGroupsSpy).toHaveBeenCalledTimes(2); + expect(createJobSpy).toHaveBeenCalledTimes(1); + expect(resposne.status).toBe(httpStatusCodes.OK); + }); + + it('should return 200 status code and the job created details even if ROI not provided (layers footprint based)', async function () { + const body: ICreatePackageRoi = { + dbId: layerFromCatalog.id, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + }; + const layerDefaultGeometry = feature(layerFromCatalog.metadata.footprint as Geometry, { + 
maxResolutionDeg: layerFromCatalog.metadata.maxResolutionDeg as number, + }); + const layerDefaultRoi = featureCollection([layerDefaultGeometry]); + const dupParams: JobExportDuplicationParams = { + crs: 'EPSG:4326', + dbId: layerFromCatalog.id, + resourceId: layerFromCatalog.metadata.productId as string, + roi: layerDefaultRoi, + version: layerFromCatalog.metadata.productVersion as string, + }; + const callbacks = [{ url: 'http://example.getmap.com/callback', roi: layerDefaultRoi }]; + + generateTileGroupsSpy.mockReturnValue([]); + findLayerSpy.mockResolvedValue(layerFromCatalog); + createJobSpy.mockResolvedValue({ id: 'b1c59730-c31d-4e44-9c67-4dbbb3b1c812', taskIds: ['6556896a-113c-4397-a48b-0cb2c99658f5'] }); + checkForExportDuplicateSpy.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + + const resposne = await requestSender.createPackageRoi(body); + + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(createJobSpy).toHaveBeenCalledTimes(1); + expect(generateTileGroupsSpy).toHaveBeenCalledTimes(1); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(1); + expect(checkForExportDuplicateSpy).toHaveBeenCalledWith(dupParams, callbacks); + expect(resposne.status).toBe(httpStatusCodes.OK); + }); + + it(`should return 200 status code and the exists un-cleaned completed job's callback (with original bbox of request)`, async function () { + checkForExportDuplicateSpy.mockRestore(); + + const expirationTime = new Date(); + const body: ICreatePackageRoi = { + dbId: layerFromCatalog.id, + roi: fc1, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + }; + const origCallback = { + roi: fc1, + links: { + dataURI: + 'https://files-server-route.io/test/downloads/415c9316e58862194145c4b54cf9d87e/Orthophoto_bluemarble_7_1_0_5_2023_02_28T15_09_50_924Z.gpkg', + metadataURI: + 'https://files-server-route.io/test/downloads/415c9316e58862194145c4b54cf9d87e/Orthophoto_bluemarble_7_1_0_5_2023_02_28T15_09_50_924Z.json', + }, + status: OperationStatus.COMPLETED, + fileSize: 10, + requestJobId: 'afbdd5e6-25db-4567-a81f-71e0e7d30761', + expirationTime: expirationTime, + recordCatalogId: layerFromCatalog.id, + }; + + const expectedCompletedCallback = { + roi: fc1, + links: { + dataURI: + 'https://files-server-route.io/test/downloads/415c9316e58862194145c4b54cf9d87e/Orthophoto_bluemarble_7_1_0_5_2023_02_28T15_09_50_924Z.gpkg', + metadataURI: + 'https://files-server-route.io/test/downloads/415c9316e58862194145c4b54cf9d87e/Orthophoto_bluemarble_7_1_0_5_2023_02_28T15_09_50_924Z.json', + }, + status: OperationStatus.COMPLETED, + fileSize: 10, + requestJobId: 'afbdd5e6-25db-4567-a81f-71e0e7d30761', + expirationTime: expirationTime, + recordCatalogId: layerFromCatalog.id, + }; + findLayerSpy.mockResolvedValue(layerFromCatalog); + checkForExportCompletedSpy.mockResolvedValue(origCallback); + checkForExportProcessingSpy.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + + const response = await requestSender.createPackageRoi(body); + + expect(response).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(checkForExportCompletedSpy).toHaveBeenCalledTimes(1); + expect(checkForExportProcessingSpy).toHaveBeenCalledTimes(0); + expect(validateFreeSpaceSpy).toHaveBeenCalledTimes(0); + expect(JSON.stringify(response.body)).toBe(JSON.stringify(expectedCompletedCallback)); + 
expect(response.status).toBe(httpStatusCodes.OK); + }); + + it(`should return 200 status code and the exists un-cleaned In-progress job's response with job id, task id and OperationStatus=In-Progress`, async function () { + checkForExportDuplicateSpy.mockRestore(); + + const body: ICreatePackageRoi = { + dbId: layerFromCatalog.id, + roi: fc1, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + }; + + const inProgressJobResonse: ICreateJobResponse = { + id: 'b1c59730-c31d-4e44-9c67-4dbbb3b1c812', + taskIds: ['6556896a-113c-4397-a48b-0cb2c99658f5'], + status: OperationStatus.IN_PROGRESS, + }; + + findLayerSpy.mockResolvedValue(layerFromCatalog); + checkForExportCompletedSpy.mockResolvedValue(undefined); + checkForExportProcessingSpy.mockResolvedValue(inProgressJobResonse); + validateFreeSpaceSpy.mockResolvedValue(true); + + const response = await requestSender.createPackageRoi(body); + + expect(response).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(checkForExportCompletedSpy).toHaveBeenCalledTimes(2); + expect(checkForExportProcessingSpy).toHaveBeenCalledTimes(1); + expect(validateFreeSpaceSpy).toHaveBeenCalledTimes(0); + expect(response.body).toStrictEqual(inProgressJobResonse); + expect(response.status).toBe(httpStatusCodes.OK); + }); + }); + + describe('Sad Path', function () { + it('should return 400 status code because of bad data - no "dbId" field', async function () { + const body = { + roi: fc1, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + } as unknown as ICreatePackageRoi; + + checkForExportDuplicateSpy.mockResolvedValue(undefined); + + const resposne = await requestSender.createPackageRoi(body); + + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(0); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(0); + expect(createJobSpy).toHaveBeenCalledTimes(0); + + expect(resposne.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + + it('should return 400 status code because of bad data - no "callbackURLs" field', async function () { + const body = { + roi: fc1, + crs: 'EPSG:4326', + priority: 0, + } as unknown as ICreatePackageRoi; + + checkForExportDuplicateSpy.mockResolvedValue(undefined); + const resposne = await requestSender.createPackageRoi(body); + + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(0); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(0); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(resposne.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + + it('should return 400 status code because of bad data - no "maxResolutionDeg" properties in feature', async function () { + const body = { + roi: fcNoMaxResolutionDeg, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + } as unknown as ICreatePackageRoi; + + checkForExportDuplicateSpy.mockResolvedValue(undefined); + const resposne = await requestSender.createPackageRoi(body); + + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(0); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(0); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(resposne.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + + it('should return 400 status code because of Bad Feature geometry - no intersection with layer geometry', async function () { + findLayerSpy.mockResolvedValue(layerFromCatalog); + + 
const body = { + dbId: layerFromCatalog.id, + roi: fcNoIntersection, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + } as unknown as ICreatePackageRoi; + + checkForExportDuplicateSpy.mockResolvedValue(undefined); + const resposne = await requestSender.createPackageRoi(body); + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(0); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(resposne.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + + it('should return 400 status code because of Bad Feature maxResolutionDeg property - requested resolution is higher than layer maximum', async function () { + findLayerSpy.mockResolvedValue(layerFromCatalog); + + const body = { + dbId: layerFromCatalog.id, + roi: fcTooHighResolution, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + } as unknown as ICreatePackageRoi; + + checkForExportDuplicateSpy.mockResolvedValue(undefined); + const resposne = await requestSender.createPackageRoi(body); + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(checkForExportDuplicateSpy).toHaveBeenCalledTimes(0); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(resposne.status).toBe(httpStatusCodes.BAD_REQUEST); + }); + }); + + describe('Bad Path', function () { + it('should return 507 status code for insufficient storage to gpkg creation', async function () { + const body: ICreatePackageRoi = { + dbId: layerFromCatalog.id, + roi: fc1, + callbackURLs: ['http://example.getmap.com/callback'], + crs: 'EPSG:4326', + priority: 0, + }; + generateTileGroupsSpy.mockReturnValue([]); + findLayerSpy.mockResolvedValue(layerFromCatalog); + checkForExportDuplicateSpy.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(false); + const resposne = await requestSender.createPackageRoi(body); + + expect(resposne).toSatisfyApiSpec(); + expect(findLayerSpy).toHaveBeenCalledTimes(1); + expect(createJobSpy).toHaveBeenCalledTimes(0); + expect(resposne.status).toBe(httpStatusCodes.INSUFFICIENT_STORAGE); + }); + }); +}); diff --git a/tests/integration/createPackage/createPackage.spec.ts b/tests/integration/createPackage/createPackage.spec.ts index 9bef5a1..bf4f3db 100644 --- a/tests/integration/createPackage/createPackage.spec.ts +++ b/tests/integration/createPackage/createPackage.spec.ts @@ -46,7 +46,7 @@ describe('tiles', function () { const body: ICreatePackage = { dbId: layerFromCatalog.id, bbox: [34.811938017107494, 31.95475033759175, 34.82237261707599, 31.96426962177354], - targetResolution: 0.0000429153442382812, + targetResolution: 0.00034332275390625, callbackURLs: ['http://example.getmap.com/callback'], crs: 'EPSG:4326', priority: 0, @@ -91,7 +91,7 @@ describe('tiles', function () { const body: ICreatePackage = { dbId: layerFromCatalog.id, bbox: [34.811938017107494, 31.95475033759175, 34.82237261707599, 31.96426962177354], - targetResolution: 0.0000429153442382812, + targetResolution: 0.00034332275390625, callbackURLs: ['http://example.getmap.com/callback'], crs: 'EPSG:4326', priority: 0, @@ -157,7 +157,7 @@ describe('tiles', function () { ], ], }, - targetResolution: 0.0000429153442382812, + targetResolution: 0.00034332275390625, callbackURLs: ['http://example.getmap.com/callback'], crs: 'EPSG:4326', priority: 0, @@ -211,7 +211,7 @@ describe('tiles', function () { const body: ICreatePackage = { dbId: layerFromCatalog.id, bbox: 
[34.811938017107494, 31.95475033759175, 34.82237261707599, 31.96426962177354], - targetResolution: 0.0000429153442382812, + targetResolution: 0.00034332275390625, callbackURLs: ['http://example.getmap.com/callback'], crs: 'EPSG:4326', priority: 0, @@ -264,7 +264,7 @@ describe('tiles', function () { const body: ICreatePackage = { dbId: layerFromCatalog.id, bbox: [34.811938017107494, 31.95475033759175, 34.82237261707599, 31.96426962177354], - targetResolution: 0.0000429153442382812, + targetResolution: 0.00034332275390625, callbackURLs: ['http://example.getmap.com/callback'], crs: 'EPSG:4326', priority: 0, diff --git a/tests/integration/createPackage/helpers/createPackageSender.ts b/tests/integration/createPackage/helpers/createPackageSender.ts index 6d20432..3502a3a 100644 --- a/tests/integration/createPackage/helpers/createPackageSender.ts +++ b/tests/integration/createPackage/helpers/createPackageSender.ts @@ -1,5 +1,5 @@ import * as supertest from 'supertest'; -import { ICreatePackage } from '../../../../src/common/interfaces'; +import { ICreatePackage, ICreatePackageRoi } from '../../../../src/common/interfaces'; export class CreatePackageSender { public constructor(private readonly app: Express.Application) {} @@ -7,4 +7,8 @@ export class CreatePackageSender { public async create(body: ICreatePackage): Promise { return supertest.agent(this.app).post(`/create`).set('Content-Type', 'application/json').send(body); } + + public async createPackageRoi(body: ICreatePackageRoi): Promise { + return supertest.agent(this.app).post(`/create/roi`).set('Content-Type', 'application/json').send(body); + } } diff --git a/tests/mocks/clients/jobManagerWrapper.ts b/tests/mocks/clients/jobManagerWrapper.ts index d2ea26d..a2edb99 100644 --- a/tests/mocks/clients/jobManagerWrapper.ts +++ b/tests/mocks/clients/jobManagerWrapper.ts @@ -4,30 +4,39 @@ const findCompletedJobMock = jest.fn(); const findInProgressJobMock = jest.fn(); const findPendingJobMock = jest.fn(); const createMock = jest.fn(); +const createExportMock = jest.fn(); const createJobMock = jest.fn(); const getInProgressJobsMock = jest.fn(); const updateJobMock = jest.fn(); const validateAndUpdateExpirationMock = jest.fn(); +const getExportJobsMock = jest.fn(); +const findExportJobMock = jest.fn(); const jobManagerWrapperMock = { createJob: createJobMock, findCompletedJob: findCompletedJobMock, findInProgressJob: findInProgressJobMock, findPendingJob: findPendingJobMock, + findExportJob: findExportJobMock, create: createMock, + createExport: createExportMock, getInProgressJobs: getInProgressJobsMock, updateJob: updateJobMock, + getExportJobs: getExportJobsMock, validateAndUpdateExpiration: validateAndUpdateExpirationMock, } as unknown as JobManagerWrapper; export { jobManagerWrapperMock, createMock, + createExportMock, createJobMock, findCompletedJobMock, findInProgressJobMock, findPendingJobMock, + findExportJobMock, validateAndUpdateExpirationMock, getInProgressJobsMock as getInProgressJobsMock, updateJobMock, + getExportJobsMock, }; diff --git a/tests/mocks/clients/packageManager.ts b/tests/mocks/clients/packageManager.ts index 9e3fc3e..8c6520a 100644 --- a/tests/mocks/clients/packageManager.ts +++ b/tests/mocks/clients/packageManager.ts @@ -2,6 +2,7 @@ import { CreatePackageManager } from '../../../src/createPackage/models/createPa const createPackageMock = jest.fn(); const createJsonMetadataMock = jest.fn(); +const createExportJsonMetadataMock = jest.fn(); const getSeparatorMock = jest.fn(); const sanitizeBboxMock = jest.fn(); const 
checkForDuplicateMock = jest.fn(); @@ -12,6 +13,7 @@ const updateCallbackURLsMock = jest.fn(); const packageManagerMock = { createPackage: createPackageMock, createJsonMetadata: createJsonMetadataMock, + createExportJsonMetadata: createExportJsonMetadataMock, getSeparator: getSeparatorMock, sanitizeBbox: sanitizeBboxMock, checkForDuplicate: checkForDuplicateMock, @@ -30,4 +32,5 @@ export { checkForCompletedMock, checkForProcessingMock, updateCallbackURLsMock, + createExportJsonMetadataMock, }; diff --git a/tests/mocks/data.ts b/tests/mocks/data.ts index 6db3df1..f9825be 100644 --- a/tests/mocks/data.ts +++ b/tests/mocks/data.ts @@ -1,9 +1,19 @@ /* eslint-disable */ -import { LayerMetadata } from '@map-colonies/mc-model-types'; +import { LayerMetadata, ProductType, RecordType } from '@map-colonies/mc-model-types'; import { IJobResponse, OperationStatus } from '@map-colonies/mc-priority-queue'; -import { ICreatePackage, IJobParameters, ITaskParameters, IWorkerInput } from '../../src/common/interfaces'; +import { FeatureCollection } from '@turf/helpers'; +import { + ExportVersion, + ICreatePackage, + IGeometryRecord, + IJobExportParameters, + IJobParameters, + ITaskParameters, + IWorkerExportInput, + IWorkerInput, +} from '../../src/common/interfaces'; -const layerMetadata = { +const layerMetadata: LayerMetadata = { type: 'RECORD_RASTER', classification: 'string', productName: 'string', @@ -22,7 +32,7 @@ const layerMetadata = { productVersion: '1.0', productType: 'OrthophotoHistory', srsName: 'string', - maxResolutionDeg: 0.000004, + maxResolutionDeg: 0.000171661376953125, maxResolutionMeter: 8000, rms: 0, scale: 1, @@ -39,6 +49,29 @@ const layerMetadata = { ], }, layerPolygonParts: { + bbox: [0, 0, 67.5, 22.5], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { Resolution: 0.000000167638063430786 }, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [67.5, 0], + [67.5, 22.5], + [0, 22.5], + [0, 0], + ], + ], + }, + }, + ], + }, + includedInBests: [], + rawProductData: { bbox: [0, 0, 67.5, 22.5], type: 'FeatureCollection', features: [ @@ -60,6 +93,66 @@ const layerMetadata = { }, ], }, + productBoundingBox: '-180,-89.999,0,90', +} as unknown as LayerMetadata; + +const layerMetadataSample: LayerMetadata = { + type: 'RECORD_RASTER', + classification: 'string', + productName: 'string', + description: 'string', + srsId: 'string', + producerName: 'string', + creationDate: '2021-12-23T15:09:28.941Z', + ingestionDate: '2021-12-23T15:09:28.941Z', + updateDate: '2021-12-23T15:09:28.941Z', + sourceDateStart: '2021-12-23T15:09:28.941Z', + sourceDateEnd: '2021-12-23T15:09:28.941Z', + minHorizontalAccuracyCE90: 0, + sensors: ['VIS', 'OTHER'], + region: ['region1', 'region1'], + productId: 'string', + productVersion: '1.0', + productType: 'OrthophotoHistory', + srsName: 'string', + maxResolutionDeg: 0.0054931640625, + maxResolutionMeter: 8000, + rms: 0, + scale: 1, + footprint: { + type: 'Polygon', + coordinates: [ + [ + [0, -89.999], + [0, 90], + [180, 90], + [180, -89.999], + [0, -89.999], + ], + ], + }, + layerPolygonParts: { + bbox: [0, 0, 67.5, 22.5], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { Resolution: 0.000000167638063430786 }, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [67.5, 0], + [67.5, 22.5], + [0, 22.5], + [0, 0], + ], + ], + }, + }, + ], + }, includedInBests: [], rawProductData: { bbox: [0, 0, 67.5, 22.5], @@ -86,6 +179,27 @@ const layerMetadata = { productBoundingBox: 
'-180,-89.999,0,90', } as unknown as LayerMetadata; +const layerMetadataRoi = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { maxResolutionDeg: 0.0054931640625 }, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, -89.999], + [0, 90], + [180, 90], + [180, -89.999], + [0, -89.999], + ], + ], + }, + }, + ], +}; const layerFromCatalog = { id: '0c3e455f-4aeb-4258-982d-f7773469a92d', links: [ @@ -108,6 +222,31 @@ const layerFromCatalog = { metadata: layerMetadata, }; +const layerFromCatalogSample = { + id: '0c3e455f-4aeb-4258-982d-f7773469a92d', + links: [ + { + name: 'string-OrthophotoHistory', + protocol: 'WMS', + url: 'http://mapproxy-map-proxy-map-proxy/service?REQUEST=GetCapabilities', + }, + { + name: 'string-OrthophotoHistory', + protocol: 'WMTS', + url: 'http://mapproxy-map-proxy-map-proxy/wmts/1.0.0/WMTSCapabilities.xml', + }, + { + name: 'string-OrthophotoHistory', + protocol: 'WMTS_LAYER', + url: 'http://mapproxy-map-proxy-map-proxy/wmts/string-OrthophotoHistory/{TileMatrixSet}/{TileMatrix}/{TileCol}/{TileRow}.png', + }, + ], + metadata: layerMetadataSample, +}; + +/** + * @deprecated GetMap API - will be deprecated on future + */ const completedJob: IJobResponse = { id: 'b0b19b88-aecb-4e74-b694-dfa7eada8bf7', resourceId: 'string', @@ -118,6 +257,7 @@ const completedJob: IJobResponse = { parameters: { crs: 'EPSG:4326', sanitizedBbox: [0, 0, 25, 41], + exportVersion: ExportVersion.GETMAP, fileName: 'test.gpkg', relativeDirectoryPath: 'test', zoomLevel: 4, @@ -178,6 +318,9 @@ const completedJob: IJobResponse = { updated: '2021-12-29T08:07:00.270Z', }; +/** + * @deprecated GetMap API - will be deprecated on future + */ const inProgressJob: IJobResponse = { id: 'fa3ab609-377a-4d96-bf0b-e0bb72f683b8', domain: 'testDomain', @@ -189,6 +332,7 @@ const inProgressJob: IJobResponse = { parameters: { fileName: 'test.gpkg', relativeDirectoryPath: 'test', + exportVersion: ExportVersion.GETMAP, crs: 'EPSG:4326', sanitizedBbox: [0, 0, 25, 41], zoomLevel: 4, @@ -231,9 +375,13 @@ const inProgressJob: IJobResponse = { updated: '2021-12-29T10:42:13.487Z', }; +/** + * @deprecated GetMap API - will be deprecated on future + */ const workerInput: IWorkerInput = { fileName: 'test.gpkg', relativeDirectoryPath: 'test', + exportVersion: ExportVersion.GETMAP, sanitizedBbox: [0, 2.999267578125, 25.0048828125, 41.0009765625], targetResolution: 0.0000429153442382812, zoomLevel: 15, @@ -279,6 +427,9 @@ const workerInput: IWorkerInput = { productType: 'OrthophotoHistory', }; +/** + * @deprecated GetMap API - will be deprecated on future + */ const jobs = [inProgressJob, completedJob]; const userInput: ICreatePackage = { @@ -289,4 +440,619 @@ const userInput: ICreatePackage = { crs: 'EPSG:4326', }; -export { layerFromCatalog, workerInput, jobs, userInput, completedJob, inProgressJob }; +const fc1: FeatureCollection = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { maxResolutionDeg: 0.02197265625 }, + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + }, + { + type: 'Feature', + properties: { maxResolutionDeg: 0.02197265625 }, + geometry: { + coordinates: [ + [ + [29.726720838716574, -10.646156974961286], + [25.120393802953117, -10.646156974961286], + [25.120393802953117, -16.979479051947962], + 
[29.726720838716574, -16.979479051947962], + [29.726720838716574, -10.646156974961286], + ], + ], + type: 'Polygon', + }, + }, + ], +}; + +const fcNoMaxResolutionDeg: FeatureCollection = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { maxResolutionDeg: 0.02197265625 }, + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + }, + ], +}; + +const fcNoIntersection: FeatureCollection = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { maxResolutionDeg: 0.02197265625 }, + geometry: { + coordinates: [ + [ + [-47.86631849806537, -5.0650089181259546], + [-59.270868429887855, -5.0650089181259546], + [-59.270868429887855, -19.06378650396573], + [-47.86631849806537, -19.06378650396573], + [-47.86631849806537, -5.0650089181259546], + ], + ], + type: 'Polygon', + }, + }, + ], +}; + +const fcTooHighResolution: FeatureCollection = { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { maxResolutionDeg: 0.000000167638063430786 }, + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + }, + ], +}; + +const workerExportInput: IWorkerExportInput = { + roi: { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + maxResolutionDeg: 0.02197265625, + }, + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + }, + { + type: 'Feature', + properties: { + maxResolutionDeg: 0.02197265625, + }, + geometry: { + coordinates: [ + [ + [29.726720838716574, -10.646156974961286], + [25.120393802953117, -10.646156974961286], + [25.120393802953117, -16.979479051947962], + [29.726720838716574, -16.979479051947962], + [29.726720838716574, -10.646156974961286], + ], + ], + type: 'Polygon', + }, + }, + ], + }, + fileNamesTemplates: { + dataURI: 'OrthophotoHistory_string_1_0_5_2023_03_02T05_43_27_066Z.gpkg', + metadataURI: 'OrthophotoHistory_string_1_0_5_2023_03_02T05_43_27_066Z.json', + }, + relativeDirectoryPath: '1a26c1661df10eee54f9727fcdb8b71d', + dbId: '0c3e455f-4aeb-4258-982d-f7773469a92d', + exportVersion: ExportVersion.ROI, + version: '1.0', + cswProductId: 'string', + crs: 'EPSG:4326', + productType: 'OrthophotoHistory', + batches: [], + sources: [ + { + path: '1a26c1661df10eee54f9727fcdb8b71d/OrthophotoHistory_string_1_0_5_2023_03_02T05_43_27_066Z.gpkg', + type: 'GPKG', + extent: { + minX: 25.120393802953117, + minY: -16.979479051947962, + maxX: 37.42414218385065, + maxY: 17.95036866237062, + }, + }, + { + path: 'undefined/undefined', + type: 'S3', + }, + ], + priority: 0, + callbacks: [ + { + url: 'http://example.getmap.com/callback', + roi: { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + maxResolutionDeg: 0.02197265625, + }, + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + 
[37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + }, + { + type: 'Feature', + properties: { + maxResolutionDeg: 0.02197265625, + }, + geometry: { + coordinates: [ + [ + [29.726720838716574, -10.646156974961286], + [25.120393802953117, -10.646156974961286], + [25.120393802953117, -16.979479051947962], + [29.726720838716574, -16.979479051947962], + [29.726720838716574, -10.646156974961286], + ], + ], + type: 'Polygon', + }, + }, + ], + }, + }, + ], + gpkgEstimatedSize: 0, +}; + +const completedExportJob: IJobResponse = { + id: 'afbdd5e6-25db-4567-a81f-71e0e7d30761', + resourceId: 'string_completed', + version: '1.0', + type: 'rasterTilesExporter', + domain: 'testDomain', + description: '', + parameters: { + crs: 'EPSG:4326', + roi: fc1, + callbacks: [ + { + url: 'http://localhost:1234', + roi: fc1, + }, + ], + exportVersion: ExportVersion.ROI, + callbackParams: { + roi: fc1, + links: { + dataURI: 'http://files-server/downloads/Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.gpkg', + metadataURI: 'http://files-server/downloads/Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.json', + }, + status: OperationStatus.COMPLETED, + fileSize: 1773568, + requestJobId: 'afbdd5e6-25db-4567-a81f-71e0e7d30761', + expirationTime: new Date(), + recordCatalogId: 'b0b19b88-aecb-4e74-b694-dfa7eada8bf7', + }, + gpkgEstimatedSize: 187500, + fileNamesTemplates: { + dataURI: 'Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.gpkg', + metadataURI: 'Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.json', + }, + relativeDirectoryPath: '415c9316e58862194145c4b54cf9d87e', + }, + + status: OperationStatus.COMPLETED, + percentage: 100, + reason: '', + isCleaned: false, + priority: 1000, + expirationDate: new Date(), + internalId: '0c3e455f-4aeb-4258-982d-f7773469a92d', + productName: 'string', + productType: 'Orthophoto', + taskCount: 1, + completedTasks: 1, + failedTasks: 0, + expiredTasks: 0, + pendingTasks: 0, + inProgressTasks: 0, + abortedTasks: 0, + tasks: [ + { + id: '542ebbfd-f4d1-4c77-bd4d-97ca121f0de7', + type: 'rasterTilesExporter', + description: '', + parameters: { + batches: [], + sources: [], + }, + status: OperationStatus.COMPLETED, + reason: '', + attempts: 0, + resettable: true, + created: '2021-12-29T08:06:48.399Z', + updated: '2021-12-29T08:07:00.293Z', + }, + ], + created: '2021-12-29T08:06:48.399Z', + updated: '2021-12-29T08:07:00.270Z', +}; + +const inProgressExportJob: IJobResponse = { + id: 'fa3ab609-377a-4d96-bf0b-e0bb72f683b8', + domain: 'testDomain', + resourceId: 'string_inprogress', + version: '1.0', + type: 'rasterTilesExporter', + percentage: 0, + description: '', + parameters: { + crs: 'EPSG:4326', + roi: fc1, + callbacks: [{ url: 'http://localhost:6969', roi: fc1 }], + exportVersion: ExportVersion.ROI, + gpkgEstimatedSize: 187500, + fileNamesTemplates: { + dataURI: 'Orthophoto_testArea_1_0_2023_03_01T15_09_50_924Z.gpkg', + metadataURI: 'Orthophoto_testArea_1_0_2023_03_01T15_09_50_924Z.json', + }, + relativeDirectoryPath: '415c9316e58862194145c4b54cf9d87e', + }, + status: OperationStatus.IN_PROGRESS, + reason: '', + isCleaned: false, + priority: 0, + expirationDate: new Date(), + internalId: '0c3e455f-4aeb-4258-982d-f7773469a92d', + productName: 'string', + productType: 'OrthophotoHistory', + taskCount: 1, + completedTasks: 0, + failedTasks: 0, + expiredTasks: 0, + pendingTasks: 0, + abortedTasks: 0, + inProgressTasks: 1, + tasks: [ + { + id: '1f765695-338b-4752-b182-a8cbae3c610e', + type: 'rasterTilesExporter', + 
description: '', + parameters: { + batches: [], + sources: [], + }, + status: OperationStatus.IN_PROGRESS, + reason: '', + attempts: 0, + resettable: true, + created: '2021-12-29T10:42:13.487Z', + updated: '2021-12-29T10:42:16.231Z', + }, + ], + created: '2021-12-29T10:42:13.487Z', + updated: '2021-12-29T10:42:13.487Z', +}; + +const exportJobs = [inProgressExportJob, completedExportJob]; + +const metadataExportJson = { + type: RecordType.RECORD_RASTER, + classification: 'string', + productName: 'string', + description: 'string', + srsId: 'string', + producerName: 'string', + creationDate: '2021-12-23T15:09:28.941Z', + ingestionDate: '2021-12-23T15:09:28.941Z', + updateDate: '2021-12-23T15:09:28.941Z', + sourceDateStart: '2021-12-23T15:09:28.941Z', + sourceDateEnd: '2021-12-23T15:09:28.941Z', + minHorizontalAccuracyCE90: 0, + sensors: ['VIS', 'OTHER'], + region: ['region1', 'region1'], + productId: 'string', + productVersion: '1.0', + productType: ProductType.ORTHOPHOTO_HISTORY, + srsName: 'string', + maxResolutionDeg: 0.02197265625, + maxResolutionMeter: 8000, + rms: 0, + scale: 1, + footprint: { + type: 'MultiPolygon', + coordinates: [ + [ + [ + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + ], + ], + [ + [ + [25.120393802953117, -16.979479051947962], + [29.726720838716574, -16.979479051947962], + [29.726720838716574, -10.646156974961286], + [25.120393802953117, -10.646156974961286], + [25.120393802953117, -16.979479051947962], + ], + ], + ], + }, + layerPolygonParts: { + bbox: [25.120393802953117, -16.979479051947962, 37.42414218385065, 17.95036866237062], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { + Resolution: 0.02197265625, + }, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + ], + ], + }, + }, + ], + }, + includedInBests: [], + rawProductData: { + bbox: [0, 0, 67.5, 22.5], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: {}, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [67.5, 0], + [67.5, 22.5], + [0, 22.5], + [0, 0], + ], + ], + }, + }, + ], + }, + productBoundingBox: '25.120393802953117,-16.979479051947962,37.42414218385065,17.95036866237062', +} as unknown as LayerMetadata; + +const featuresRecordsSampleFc1 = [ + { + geometry: { + coordinates: [ + [ + [37.42414218385065, 17.95036866237062], + [30.42608533411871, 17.95036866237062], + [30.42608533411871, 11.52904501530621], + [37.42414218385065, 11.52904501530621], + [37.42414218385065, 17.95036866237062], + ], + ], + type: 'Polygon', + }, + sanitizedBox: [28.125, 11.25, 39.375, 22.5], + targetResolutionDeg: 0.02197265625, + targetResolutionMeter: 2445.98, + zoomLevel: 5, + }, + { + geometry: { + coordinates: [ + [ + [29.726720838716574, -10.646156974961286], + [25.120393802953117, -10.646156974961286], + [25.120393802953117, -16.979479051947962], + [29.726720838716574, -16.979479051947962], + [29.726720838716574, -10.646156974961286], + ], + ], + type: 'Polygon', + }, + sanitizedBox: [22.5, -22.5, 33.75, -5.625], + targetResolutionDeg: 0.02197265625, + targetResolutionMeter: 2445.98, + zoomLevel: 5, + }, +]; + +const pycswRecord = { + id: 
'0c3e455f-4aeb-4258-982d-f7773469a92d', + links: [ + { + name: 'string-OrthophotoHistory', + protocol: 'WMS', + url: 'http://mapproxy-map-proxy-map-proxy/service?REQUEST=GetCapabilities', + }, + { + name: 'string-OrthophotoHistory', + protocol: 'WMTS', + url: 'http://mapproxy-map-proxy-map-proxy/wmts/1.0.0/WMTSCapabilities.xml', + }, + { + name: 'string-OrthophotoHistory', + protocol: 'WMTS_LAYER', + url: 'http://mapproxy-map-proxy-map-proxy/wmts/string-OrthophotoHistory/{TileMatrixSet}/{TileMatrix}/{TileCol}/{TileRow}.png', + }, + ], + metadata: { + type: 'RECORD_RASTER', + classification: 'string', + productName: 'string', + description: 'string', + srsId: 'string', + producerName: 'string', + creationDate: '2021-12-23T15:09:28.941Z', + ingestionDate: '2021-12-23T15:09:28.941Z', + updateDate: '2021-12-23T15:09:28.941Z', + sourceDateStart: '2021-12-23T15:09:28.941Z', + sourceDateEnd: '2021-12-23T15:09:28.941Z', + minHorizontalAccuracyCE90: 0, + sensors: ['VIS', 'OTHER'], + region: ['region1', 'region1'], + productId: 'string', + productVersion: '1.0', + productType: 'OrthophotoHistory', + srsName: 'string', + maxResolutionDeg: 0.0054931640625, + maxResolutionMeter: 8000, + rms: 0, + scale: 1, + footprint: { + type: 'Polygon', + coordinates: [ + [ + [0, -89.999], + [0, 90], + [180, 90], + [180, -89.999], + [0, -89.999], + ], + ], + }, + layerPolygonParts: { + bbox: [0, 0, 67.5, 22.5], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: { Resolution: 0.000000167638063430786 }, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [67.5, 0], + [67.5, 22.5], + [0, 22.5], + [0, 0], + ], + ], + }, + }, + ], + }, + includedInBests: [], + rawProductData: { + bbox: [0, 0, 67.5, 22.5], + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + properties: {}, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [67.5, 0], + [67.5, 22.5], + [0, 22.5], + [0, 0], + ], + ], + }, + }, + ], + }, + productBoundingBox: '-180,-89.999,0,90', + } as unknown as LayerMetadata, +}; + +export { + layerFromCatalog, + workerInput, + workerExportInput, + jobs, + userInput, + completedJob, + inProgressJob, + fc1, + fcNoMaxResolutionDeg, + fcNoIntersection, + fcTooHighResolution, + completedExportJob, + inProgressExportJob, + exportJobs, + metadataExportJson, + layerFromCatalogSample, + featuresRecordsSampleFc1, + layerMetadataRoi, + layerMetadataSample, + pycswRecord, +}; diff --git a/tests/mocks/data/mockJob.ts b/tests/mocks/data/mockJob.ts index 6c9ae01..66200ce 100644 --- a/tests/mocks/data/mockJob.ts +++ b/tests/mocks/data/mockJob.ts @@ -1,7 +1,11 @@ /* eslint-disable @typescript-eslint/no-magic-numbers */ import { OperationStatus } from '@map-colonies/mc-priority-queue'; -import { JobResponse } from '../../../src/common/interfaces'; +import { ExportVersion, JobExportResponse, JobResponse } from '../../../src/common/interfaces'; +import { fc1 } from '../data'; +/** + * @deprecated GetMap API - will be deprecated on future + */ export const mockJob: JobResponse = { id: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f', resourceId: 'test', @@ -12,6 +16,7 @@ export const mockJob: JobResponse = { parameters: { crs: 'EPSG:4326', fileName: 'test.gpkg', + exportVersion: ExportVersion.GETMAP, callbacks: [ { url: 'http://example.getmap.com/callback', @@ -49,3 +54,54 @@ export const mockJob: JobResponse = { created: '2022-08-29T07:06:05.043Z', updated: '2022-08-29T07:13:05.206Z', }; + +export const mockCompletedJob: JobExportResponse = { + id: 
'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f', + resourceId: 'testCompleted', + domain: 'testDomain', + version: '1.0', + type: 'rasterTilesExporter', + description: '', + parameters: { + crs: 'EPSG:4326', + roi: fc1, + callbacks: [ + { + url: 'http://localhost:1234', + roi: fc1, + }, + { + url: 'http://localhost:5678', + roi: fc1, + }, + ], + exportVersion: ExportVersion.ROI, + gpkgEstimatedSize: 187500, + fileNamesTemplates: { + dataURI: 'Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.gpkg', + metadataURI: 'Orthophoto_testArea_1_0_2023_02_28T15_09_50_924Z.json', + }, + relativeDirectoryPath: '415c9316e58862194145c4b54cf9d87e', + }, + status: OperationStatus.IN_PROGRESS, + percentage: 100, + reason: '', + isCleaned: false, + priority: 0, + expirationDate: new Date(), + internalId: '880a9316-0f10-4874-92e2-a62d587a1169', + producerName: undefined, + productName: 'test', + productType: 'Orthophoto', + additionalIdentifiers: '0,-90,180,903', + taskCount: 1, + completedTasks: 0, + failedTasks: 0, + expiredTasks: 0, + pendingTasks: 0, + inProgressTasks: 0, + abortedTasks: 0, + tasks: [], + created: '2022-08-29T07:06:05.043Z', + updated: '2022-08-29T07:13:05.206Z', +}; diff --git a/tests/unit/clients/jobManagerClient.spec.ts b/tests/unit/clients/jobManagerClient.spec.ts index 11f2663..003bbe7 100644 --- a/tests/unit/clients/jobManagerClient.spec.ts +++ b/tests/unit/clients/jobManagerClient.spec.ts @@ -1,16 +1,26 @@ import jsLogger from '@map-colonies/js-logger'; import { OperationStatus } from '@map-colonies/mc-priority-queue'; import { getUTCDate } from '@map-colonies/mc-utils'; -import { JobManagerWrapper } from '../../../src/clients/jobManagerWrapper'; -import { JobResponse } from '../../../src/common/interfaces'; +import { IFindJob, JobManagerWrapper } from '../../../src/clients/jobManagerWrapper'; +import { JobResponse, ICreateJobResponse as JobInProgressResponse, JobExportDuplicationParams } from '../../../src/common/interfaces'; import { configMock, registerDefaultConfig } from '../../mocks/config'; -import { inProgressJob, jobs, workerInput } from '../../mocks/data'; +import { + completedExportJob, + fc1, + inProgressExportJob, + inProgressJob, + jobs, + layerFromCatalog, + workerExportInput, + workerInput, +} from '../../mocks/data'; let jobManagerClient: JobManagerWrapper; let postFun: jest.Mock; let putFun: jest.Mock; let getJobs: jest.Mock; let get: jest.Mock; +let getExportJobs: jest.Mock; describe('JobManagerClient', () => { describe('#createJob', () => { @@ -25,116 +35,296 @@ describe('JobManagerClient', () => { jest.restoreAllMocks(); }); - it('should create job successfully', async () => { - postFun = jest.fn(); - (jobManagerClient as unknown as { post: unknown }).post = postFun.mockResolvedValue({ id: '123', taskIds: ['123'] }); - await jobManagerClient.create(workerInput); + describe('getMap', () => { + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should create job successfully', async () => { + postFun = jest.fn(); + (jobManagerClient as unknown as { post: unknown }).post = postFun.mockResolvedValue({ id: '123', taskIds: ['123'] }); + await jobManagerClient.create(workerInput); - expect(postFun).toHaveBeenCalledTimes(1); - }); + expect(postFun).toHaveBeenCalledTimes(1); + }); - it('should update job successfully', async () => { - putFun = jest.fn(); - (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); - await jobManagerClient.updateJob('123213', { status: OperationStatus.COMPLETED }); + it('should update job 
successfully', async () => { + putFun = jest.fn(); + (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); + await jobManagerClient.updateJob('123213', { status: OperationStatus.COMPLETED }); - expect(putFun).toHaveBeenCalledTimes(1); - }); + expect(putFun).toHaveBeenCalledTimes(1); + }); + + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should findCompletedJobs successfully', async () => { + getJobs = jest.fn(); - it('should findCompletedJobs successfully', async () => { - getJobs = jest.fn(); + const jobManager = jobManagerClient as unknown as { getJobs: unknown }; + jobManager.getJobs = getJobs.mockResolvedValue(jobs); - const jobManager = jobManagerClient as unknown as { getJobs: unknown }; - jobManager.getJobs = getJobs.mockResolvedValue(jobs); + const completedJobs = await jobManagerClient.findCompletedJob({ + resourceId: jobs[0].resourceId, + version: jobs[0].version, + dbId: jobs[0].internalId as string, + zoomLevel: jobs[0].parameters.zoomLevel, + crs: 'EPSG:4326', + sanitizedBbox: jobs[0].parameters.sanitizedBbox, + }); - const completedJobs = await jobManagerClient.findCompletedJob({ - resourceId: jobs[0].resourceId, - version: jobs[0].version, - dbId: jobs[0].internalId as string, - zoomLevel: jobs[0].parameters.zoomLevel, - crs: 'EPSG:4326', - sanitizedBbox: jobs[0].parameters.sanitizedBbox, + expect(getJobs).toHaveBeenCalledTimes(1); + expect(completedJobs).toBeDefined(); }); - expect(getJobs).toHaveBeenCalledTimes(1); - expect(completedJobs).toBeDefined(); - }); + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should findInProgressJob successfully', async () => { + getJobs = jest.fn(); - it('should findInProgressJob successfully', async () => { - getJobs = jest.fn(); + const jobManager = jobManagerClient as unknown as { getJobs: unknown }; + jobManager.getJobs = getJobs.mockResolvedValue(jobs); - const jobManager = jobManagerClient as unknown as { getJobs: unknown }; - jobManager.getJobs = getJobs.mockResolvedValue(jobs); + const completedJobs = await jobManagerClient.findInProgressJob({ + resourceId: jobs[0].resourceId, + version: jobs[0].version, + dbId: jobs[0].internalId as string, + zoomLevel: jobs[0].parameters.zoomLevel, + crs: 'EPSG:4326', + sanitizedBbox: jobs[0].parameters.sanitizedBbox, + }); - const completedJobs = await jobManagerClient.findInProgressJob({ - resourceId: jobs[0].resourceId, - version: jobs[0].version, - dbId: jobs[0].internalId as string, - zoomLevel: jobs[0].parameters.zoomLevel, - crs: 'EPSG:4326', - sanitizedBbox: jobs[0].parameters.sanitizedBbox, + expect(getJobs).toHaveBeenCalledTimes(1); + expect(completedJobs).toBeDefined(); }); - expect(getJobs).toHaveBeenCalledTimes(1); - expect(completedJobs).toBeDefined(); - }); + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should get In-Progress jobs status successfully', async () => { + getJobs = jest.fn(); + const jobs: JobResponse[] = []; + jobs.push(inProgressJob); + const jobManager = jobManagerClient as unknown as { getJobs: unknown }; + jobManager.getJobs = getJobs.mockResolvedValue(jobs); - it('should get In-Progress jobs status successfully', async () => { - getJobs = jest.fn(); - const jobs: JobResponse[] = []; - jobs.push(inProgressJob); - const jobManager = jobManagerClient as unknown as { getJobs: unknown }; - jobManager.getJobs = getJobs.mockResolvedValue(jobs); + const result = await jobManagerClient.getInProgressJobs(); - const result = await 
jobManagerClient.getInProgressJobs(); + expect(getJobs).toHaveBeenCalledTimes(1); + expect(result).toBeDefined(); + expect(result).toEqual(jobs); + }); - expect(getJobs).toHaveBeenCalledTimes(1); - expect(result).toBeDefined(); - expect(result).toEqual(jobs); - }); + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should successfully update job expirationDate (old expirationDate lower)', async () => { + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = getUTCDate(); + const expectedNewExpirationDate = getUTCDate(); + testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); + expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); + expectedNewExpirationDate.setSeconds(0, 0); - it('should successfully update job expirationDate (old expirationDate lower)', async () => { - const expirationDays: number = configMock.get('jobManager.expirationDays'); - const testExpirationDate = getUTCDate(); - const expectedNewExpirationDate = getUTCDate(); - testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); - expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); - expectedNewExpirationDate.setSeconds(0, 0); - - get = jest.fn(); - putFun = jest.fn(); - (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); - const jobManager = jobManagerClient as unknown as { get: unknown }; - jobManager.get = get.mockResolvedValue({ ...inProgressJob, expirationDate: testExpirationDate }); - - await jobManagerClient.validateAndUpdateExpiration(inProgressJob.id); - - expect(get).toHaveBeenCalledTimes(1); - expect(putFun).toHaveBeenCalledTimes(1); - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access - const expirationParamCall: Date = putFun.mock.calls[0][1].expirationDate; - expirationParamCall.setSeconds(0, 0); - expect(JSON.stringify(expirationParamCall)).toBe(JSON.stringify(expectedNewExpirationDate)); + get = jest.fn(); + putFun = jest.fn(); + (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); + const jobManager = jobManagerClient as unknown as { get: unknown }; + jobManager.get = get.mockResolvedValue({ ...inProgressJob, expirationDate: testExpirationDate }); + + await jobManagerClient.validateAndUpdateExpiration(inProgressJob.id); + + expect(get).toHaveBeenCalledTimes(1); + expect(putFun).toHaveBeenCalledTimes(1); + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access + const expirationParamCall: Date = putFun.mock.calls[0][1].expirationDate; + expirationParamCall.setSeconds(0, 0); + expect(JSON.stringify(expirationParamCall)).toBe(JSON.stringify(expectedNewExpirationDate)); + }); + + /** + * @deprecated GetMap API - will be deprecated on future + */ + it('should not update job expirationDate (old expirationDate higher)', async () => { + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = getUTCDate(); + const expectedNewExpirationDate = getUTCDate(); + testExpirationDate.setDate(testExpirationDate.getDate() + 2 * expirationDays); + expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); + expectedNewExpirationDate.setSeconds(0, 0); + + get = jest.fn(); + putFun = jest.fn(); + (jobManagerClient as unknown as { put: unknown }).put = 
putFun.mockResolvedValue(undefined); + const jobManager = jobManagerClient as unknown as { get: unknown }; + jobManager.get = get.mockResolvedValue({ ...inProgressJob, expirationDate: testExpirationDate }); + + await jobManagerClient.validateAndUpdateExpiration(inProgressJob.id); + + expect(get).toHaveBeenCalledTimes(1); + expect(putFun).toHaveBeenCalledTimes(0); + }); }); - it('should not update job expirationDate (old expirationDate higher)', async () => { - const expirationDays: number = configMock.get('jobManager.expirationDays'); - const testExpirationDate = getUTCDate(); - const expectedNewExpirationDate = getUTCDate(); - testExpirationDate.setDate(testExpirationDate.getDate() + 2 * expirationDays); - expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); - expectedNewExpirationDate.setSeconds(0, 0); + describe('RoiExport', () => { + describe('Export Job Creation', () => { + it('should create Export job successfully', async () => { + const inProgressJobIds = { id: '123', taskIds: ['123'] }; + const expectedResponse: JobInProgressResponse = { + ...inProgressJobIds, + status: OperationStatus.IN_PROGRESS, + }; + postFun = jest.fn(); + (jobManagerClient as unknown as { post: unknown }).post = postFun.mockResolvedValue({ id: '123', taskIds: ['123'] }); + const response = await jobManagerClient.createExport(workerExportInput); + expect(postFun).toHaveBeenCalledTimes(1); + expect(response).toStrictEqual(expectedResponse); + }); + }); + + describe('Get Export Jobs', () => { + it('should getting jobs that match find params Export job successfully', async () => { + const findJobRequest: IFindJob = { + resourceId: layerFromCatalog.metadata.productId, + version: layerFromCatalog.metadata.productVersion, + isCleaned: 'false', + status: OperationStatus.IN_PROGRESS, + shouldReturnTasks: 'false', + }; + get = jest.fn(); + (jobManagerClient as unknown as { get: unknown }).get = get.mockResolvedValue([inProgressExportJob]); + const response = await jobManagerClient.getExportJobs(findJobRequest); + expect(get).toHaveBeenCalledTimes(1); + expect(response).toBeDefined(); + }); + }); + + describe('Find Job by Status', () => { + it('should findExportCompletedJobs successfully', async () => { + const tilesJobType = configMock.get('workerTypes.tiles.jobType'); + getExportJobs = jest.fn(); + const jobManager = jobManagerClient as unknown as { getExportJobs: unknown }; + jobManager.getExportJobs = getExportJobs.mockResolvedValue([completedExportJob]); + + const jobParams: JobExportDuplicationParams = { + resourceId: completedExportJob.resourceId, + version: completedExportJob.version, + dbId: completedExportJob.internalId as string, + crs: 'EPSG:4326', + roi: fc1, + }; + + const completedJobs = await jobManagerClient.findExportJob(OperationStatus.COMPLETED, jobParams); + const resultParams = { + resourceId: completedJobs?.resourceId, + version: completedJobs?.version, + dbId: completedJobs?.internalId as string, + crs: completedJobs?.parameters.crs, + roi: completedJobs?.parameters.roi, + }; + expect(getExportJobs).toHaveBeenCalledTimes(1); + expect(getExportJobs).toHaveBeenCalledWith({ + resourceId: jobParams.resourceId, + version: jobParams.version, + isCleaned: 'false', + type: tilesJobType, + shouldReturnTasks: 'false', + status: OperationStatus.COMPLETED, + }); + expect(completedJobs).toBeDefined(); + expect(resultParams).toStrictEqual(jobParams); + }); + + it('should findExportInProgressJobs successfully', async () => { + const tilesJobType = 
configMock.get('workerTypes.tiles.jobType'); + getExportJobs = jest.fn(); + const jobManager = jobManagerClient as unknown as { getExportJobs: unknown }; + jobManager.getExportJobs = getExportJobs.mockResolvedValue([inProgressExportJob]); + + const jobParams: JobExportDuplicationParams = { + resourceId: inProgressExportJob.resourceId, + version: inProgressExportJob.version, + dbId: inProgressExportJob.internalId as string, + crs: 'EPSG:4326', + roi: fc1, + }; - get = jest.fn(); - putFun = jest.fn(); - (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); - const jobManager = jobManagerClient as unknown as { get: unknown }; - jobManager.get = get.mockResolvedValue({ ...inProgressJob, expirationDate: testExpirationDate }); + const inProgressExportJobJobs = await jobManagerClient.findExportJob(OperationStatus.IN_PROGRESS, jobParams); + const resultParams = { + resourceId: inProgressExportJobJobs?.resourceId, + version: inProgressExportJobJobs?.version, + dbId: inProgressExportJobJobs?.internalId as string, + crs: inProgressExportJobJobs?.parameters.crs, + roi: inProgressExportJobJobs?.parameters.roi, + }; + expect(getExportJobs).toHaveBeenCalledTimes(1); + expect(getExportJobs).toHaveBeenCalledWith({ + resourceId: jobParams.resourceId, + version: jobParams.version, + isCleaned: 'false', + type: tilesJobType, + shouldReturnTasks: 'false', + status: OperationStatus.IN_PROGRESS, + }); + expect(inProgressExportJobJobs).toBeDefined(); + expect(resultParams).toStrictEqual(jobParams); + }); + }); + describe('Update Jobs', () => { + it('should successfully update running Export job (already in progress) expirationDate (old expirationDate lower)', async () => { + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = getUTCDate(); + const expectedNewExpirationDate = getUTCDate(); + testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); + expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); + expectedNewExpirationDate.setSeconds(0, 0); + + get = jest.fn(); + putFun = jest.fn(); + (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); + const jobManager = jobManagerClient as unknown as { get: unknown }; + jobManager.get = get.mockResolvedValue({ ...inProgressExportJob, expirationDate: testExpirationDate }); + + await jobManagerClient.validateAndUpdateExpiration(inProgressExportJob.id); + + expect(get).toHaveBeenCalledTimes(1); + expect(putFun).toHaveBeenCalledTimes(1); + // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access + const expirationParamCall: Date = putFun.mock.calls[0][1].expirationDate; + expirationParamCall.setSeconds(0, 0); + expect(JSON.stringify(expirationParamCall)).toBe(JSON.stringify(expectedNewExpirationDate)); + }); - await jobManagerClient.validateAndUpdateExpiration(inProgressJob.id); + it('should not update running Export job (already in progress) expirationDate (old expirationDate higher)', async () => { + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = getUTCDate(); + const expectedNewExpirationDate = getUTCDate(); + testExpirationDate.setDate(testExpirationDate.getDate() + 2 * expirationDays); + expectedNewExpirationDate.setDate(expectedNewExpirationDate.getDate() + expirationDays); + expectedNewExpirationDate.setSeconds(0, 0); - expect(get).toHaveBeenCalledTimes(1); - 
expect(putFun).toHaveBeenCalledTimes(0); + get = jest.fn(); + putFun = jest.fn(); + (jobManagerClient as unknown as { put: unknown }).put = putFun.mockResolvedValue(undefined); + const jobManager = jobManagerClient as unknown as { get: unknown }; + jobManager.get = get.mockResolvedValue({ ...inProgressExportJob, expirationDate: testExpirationDate }); + + await jobManagerClient.validateAndUpdateExpiration(inProgressExportJob.id); + + expect(get).toHaveBeenCalledTimes(1); + expect(putFun).toHaveBeenCalledTimes(0); + }); + }); + describe('Get tasks by job id', () => { + it('should getting all task that match specific job id provided (uuid-string)', async () => { + get = jest.fn(); + (jobManagerClient as unknown as { get: unknown }).get = get.mockResolvedValue([]); + const response = await jobManagerClient.getTasksByJobId(inProgressExportJob.id); + expect(get).toHaveBeenCalledTimes(1); + expect(response).toBeDefined(); + }); + }); }); }); }); diff --git a/tests/unit/common/utils/utils.spec.ts b/tests/unit/common/utils/utils.spec.ts index de8b63f..40a2ea8 100644 --- a/tests/unit/common/utils/utils.spec.ts +++ b/tests/unit/common/utils/utils.spec.ts @@ -1,6 +1,7 @@ import { ITileRange } from '@map-colonies/mc-utils'; import { configMock, registerDefaultConfig } from '../../../mocks/config'; import * as utils from '../../../../src/common/utils'; +import { fc1, fcTooHighResolution } from '../../../mocks/data'; const sanitizedBatchesExample: ITileRange[] = [ { minX: 1, minY: 0, maxX: 2, maxY: 1, zoom: 0 }, @@ -40,4 +41,42 @@ describe('Utils', () => { expect(result).toBe('test.test2.test3'); }); }); + + describe('FeatureCollection Utils', () => { + describe('generateGeoIdentifier', () => { + it('should return hashed unique id based on roi geometry and be consistent to same ROI', () => { + const result1 = utils.generateGeoIdentifier(fc1); + const result2 = utils.generateGeoIdentifier({ ...fc1 }); + const result3 = utils.generateGeoIdentifier(fcTooHighResolution); + expect(result1).toBe('1a26c1661df10eee54f9727fcdb8b71d'); + expect(result1 === result2).toBe(true); + expect(result1 === result3).toBe(false); + }); + }); + + describe('parseFeatureCollection', () => { + it('should return array of 2 IGeometry objects', () => { + const expectedObjectBase = { + zoomLevel: 5, + targetResolutionDeg: 0.02197265625, + targetResolutionMeter: 2445.98, + }; + const result = utils.parseFeatureCollection(fc1); + expect(result).toHaveLength(2); + expect(result[0]).toStrictEqual({ ...expectedObjectBase, geometry: fc1.features[0].geometry }); + expect(result[1]).toStrictEqual({ ...expectedObjectBase, geometry: fc1.features[1].geometry }); + }); + + it('should return array of 1 IGeometry objects', () => { + const expectedObjectBase = { + zoomLevel: 22, + targetResolutionDeg: 1.67638063430786e-7, + targetResolutionMeter: 0.0185, + }; + const result = utils.parseFeatureCollection(fcTooHighResolution); + expect(result).toHaveLength(1); + expect(result[0]).toStrictEqual({ ...expectedObjectBase, geometry: fcTooHighResolution.features[0].geometry }); + }); + }); + }); }); diff --git a/tests/unit/createPackage/models/createPackageModel.spec.ts b/tests/unit/createPackage/models/createPackageModel.spec.ts index dab8c34..f95b102 100644 --- a/tests/unit/createPackage/models/createPackageModel.spec.ts +++ b/tests/unit/createPackage/models/createPackageModel.spec.ts @@ -1,6 +1,9 @@ +/* eslint-disable @typescript-eslint/no-unsafe-member-access */ +/* eslint-disable @typescript-eslint/no-unsafe-assignment */ +/* eslint-disable 
@typescript-eslint/naming-convention */ import fs from 'fs'; import { sep } from 'path'; -import { BadRequestError } from '@map-colonies/error-types'; +import { BadRequestError, InsufficientStorage } from '@map-colonies/error-types'; import jsLogger from '@map-colonies/js-logger'; import { IJobResponse, OperationStatus } from '@map-colonies/mc-priority-queue'; import { LayerMetadata } from '@map-colonies/mc-model-types'; @@ -12,13 +15,40 @@ import { findPendingJobMock, updateJobMock, createMock, + findExportJobMock, + createExportMock, } from '../../../mocks/clients/jobManagerWrapper'; import { catalogManagerMock, findLayerMock } from '../../../mocks/clients/catalogManagerClient'; -import { ICreateJobResponse, ICreatePackage, IJobParameters, ITaskParameters, JobDuplicationParams } from '../../../../src/common/interfaces'; +import { + ExportVersion, + ICreateJobResponse, + ICreatePackage, + ICreatePackageRoi, + IJobParameters, + ITaskParameters, + JobDuplicationParams, + JobExportDuplicationParams, +} from '../../../../src/common/interfaces'; import { CreatePackageManager } from '../../../../src/createPackage/models/createPackageManager'; -import { completedJob, inProgressJob, layerFromCatalog, userInput } from '../../../mocks/data'; +import { + completedExportJob, + completedJob, + inProgressJob, + layerFromCatalog, + userInput, + metadataExportJson, + layerFromCatalogSample, + fc1, + featuresRecordsSampleFc1, + layerMetadataRoi, + pycswRecord, + inProgressExportJob, + fcNoIntersection, + fcTooHighResolution, +} from '../../../mocks/data'; import { configMock, registerDefaultConfig } from '../../../mocks/config'; import { METADA_JSON_FILE_EXTENSION } from '../../../../src/common/constants'; +import * as utils from '../../../../src/common/utils'; jest.mock('fs', () => { // eslint-disable-next-line @typescript-eslint/no-unsafe-return @@ -45,315 +75,677 @@ describe('CreatePackageManager', () => { jest.resetAllMocks(); jest.restoreAllMocks(); }); + /** + * @deprecated GetMap API - will be deprecated on future + */ + describe('GetMAP', () => { + describe('#create', () => { + it('should create job and return its job and task ids', async () => { + const req: ICreatePackage = { + dbId: layerFromCatalog.id, + bbox: [0, 1, 3, 5], + callbackURLs: ['testUrl'], + targetResolution: 0.0439453125, + crs: 'EPSG:4326', + }; - describe('#create', () => { - it('should create job and return its job and task ids', async () => { - const req: ICreatePackage = { - dbId: layerFromCatalog.id, - bbox: [0, 1, 3, 5], - callbackURLs: ['testUrl'], - targetResolution: 0.0439453125, - crs: 'EPSG:4326', - }; - - const expectedsanitizedBbox: BBox = [0, 0, 11.25, 11.25]; - const jobDupParams: JobDuplicationParams = { - resourceId: 'string', - version: '1.0', - dbId: layerFromCatalog.id, - zoomLevel: 4, - sanitizedBbox: expectedsanitizedBbox, - crs: 'EPSG:4326', - }; - - const expectedCreateJobResponse = { - jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', - taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], - status: OperationStatus.IN_PROGRESS, - }; - const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); - - findLayerMock.mockResolvedValue(layerFromCatalog); - createMock.mockResolvedValue(expectedCreateJobResponse); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(undefined); - findPendingJobMock.mockResolvedValue(undefined); - validateFreeSpaceSpy.mockResolvedValue(true); - const res = await 
createPackageManager.createPackage(req); - - expect(res).toEqual(expectedCreateJobResponse); - expect(findLayerMock).toHaveBeenCalledWith(req.dbId); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(1); - expect(findCompletedJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findCompletedJobMock).toHaveBeenCalledTimes(1); - expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findInProgressJobMock).toHaveBeenCalledTimes(1); - }); - - it('should create job and convert provided footprint to bbox', async () => { - const footprint: Polygon = { - type: 'Polygon', - coordinates: [ - [ - [25, 15], - [50, 15], - [50, 40], - [25, 40], - [25, 15], + const expectedsanitizedBbox: BBox = [0, 0, 11.25, 11.25]; + const jobDupParams: JobDuplicationParams = { + resourceId: 'string', + version: '1.0', + dbId: layerFromCatalog.id, + zoomLevel: 4, + sanitizedBbox: expectedsanitizedBbox, + crs: 'EPSG:4326', + }; + + const expectedCreateJobResponse = { + jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', + taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], + status: OperationStatus.IN_PROGRESS, + }; + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(expectedCreateJobResponse); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(undefined); + findPendingJobMock.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + const res = await createPackageManager.createPackage(req); + + expect(res).toEqual(expectedCreateJobResponse); + expect(findLayerMock).toHaveBeenCalledWith(req.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledTimes(1); + expect(findCompletedJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findCompletedJobMock).toHaveBeenCalledTimes(1); + expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findInProgressJobMock).toHaveBeenCalledTimes(1); + }); + + it('should create job and convert provided footprint to bbox', async () => { + const footprint: Polygon = { + type: 'Polygon', + coordinates: [ + [ + [25, 15], + [50, 15], + [50, 40], + [25, 40], + [25, 15], + ], ], - ], - }; - const req: ICreatePackage = { - dbId: layerFromCatalog.id, - bbox: footprint, - callbackURLs: ['testUrl'], - targetResolution: 0.0439453125, - crs: 'EPSG:4326', - }; - - const expectedsanitizedBbox: BBox = [22.5, 11.25, 56.25, 45]; - const jobDupParams: JobDuplicationParams = { - resourceId: 'string', - version: '1.0', - dbId: layerFromCatalog.id, - zoomLevel: 4, - sanitizedBbox: expectedsanitizedBbox, - crs: 'EPSG:4326', - }; - - const expectedCreateJobResponse = { - jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', - taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], - status: OperationStatus.IN_PROGRESS, - }; - const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); - const normalize2PolygonSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { normalize2Polygon: jest.Mock }, 'normalize2Polygon'); - - findLayerMock.mockResolvedValue(layerFromCatalog); - createMock.mockResolvedValue(expectedCreateJobResponse); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(undefined); - findPendingJobMock.mockResolvedValue(undefined); - 
validateFreeSpaceSpy.mockResolvedValue(true); - const res = await createPackageManager.createPackage(req); - - expect(res).toEqual(expectedCreateJobResponse); - expect(findLayerMock).toHaveBeenCalledWith(req.dbId); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(normalize2PolygonSpy).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(1); - expect(findCompletedJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findCompletedJobMock).toHaveBeenCalledTimes(1); - expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findInProgressJobMock).toHaveBeenCalledTimes(1); - }); + }; + const req: ICreatePackage = { + dbId: layerFromCatalog.id, + bbox: footprint, + callbackURLs: ['testUrl'], + targetResolution: 0.0439453125, + crs: 'EPSG:4326', + }; - it(`should create job and take original layer's resolution and sanitized bbox`, async () => { - const req: ICreatePackage = { - dbId: layerFromCatalog.id, - callbackURLs: ['testUrl'], - crs: 'EPSG:4326', - }; - - const expectedCreateJobResponse = { - jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', - taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], - status: OperationStatus.IN_PROGRESS, - }; - - const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); - - const expectedsanitizedBbox: BBox = [0, -90, 180, 90]; - const expectedTargetResolution = layerFromCatalog.metadata.maxResolutionDeg; - - findLayerMock.mockResolvedValue(layerFromCatalog); - createMock.mockResolvedValue(expectedCreateJobResponse); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(undefined); - findPendingJobMock.mockResolvedValue(undefined); - validateFreeSpaceSpy.mockResolvedValue(true); - - const res = await createPackageManager.createPackage(req); - - expect(res).toEqual(expectedCreateJobResponse); - expect(findLayerMock).toHaveBeenCalledWith(req.dbId); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledWith( - expect.objectContaining({ - targetResolution: expectedTargetResolution, + const expectedsanitizedBbox: BBox = [22.5, 11.25, 56.25, 45]; + const jobDupParams: JobDuplicationParams = { + resourceId: 'string', + version: '1.0', + dbId: layerFromCatalog.id, + zoomLevel: 4, + sanitizedBbox: expectedsanitizedBbox, + crs: 'EPSG:4326', + }; + + const expectedCreateJobResponse = { + jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', + taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], + status: OperationStatus.IN_PROGRESS, + }; + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + const normalize2PolygonSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { normalize2Polygon: jest.Mock }, 'normalize2Polygon'); + + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(expectedCreateJobResponse); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(undefined); + findPendingJobMock.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + const res = await createPackageManager.createPackage(req); + + expect(res).toEqual(expectedCreateJobResponse); + expect(findLayerMock).toHaveBeenCalledWith(req.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(normalize2PolygonSpy).toHaveBeenCalledTimes(1); + 
expect(createMock).toHaveBeenCalledTimes(1); + expect(findCompletedJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findCompletedJobMock).toHaveBeenCalledTimes(1); + expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findInProgressJobMock).toHaveBeenCalledTimes(1); + }); + + it(`should create job and take original layer's resolution and sanitized bbox`, async () => { + const req: ICreatePackage = { + dbId: layerFromCatalog.id, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + }; + + const expectedCreateJobResponse = { + jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', + taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], + status: OperationStatus.IN_PROGRESS, + }; + + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + + const expectedsanitizedBbox: BBox = [0, -90, 180, 90]; + const expectedTargetResolution = layerFromCatalog.metadata.maxResolutionDeg; + + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(expectedCreateJobResponse); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(undefined); + findPendingJobMock.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(true); + + const res = await createPackageManager.createPackage(req); + + expect(res).toEqual(expectedCreateJobResponse); + expect(findLayerMock).toHaveBeenCalledWith(req.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledWith( + expect.objectContaining({ + targetResolution: expectedTargetResolution, + sanitizedBbox: expectedsanitizedBbox, + }) + ); + }); + + it('should return job and task-ids of existing in pending job', async () => { + const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; + const jobDupParams: JobDuplicationParams = { + resourceId: layerFromCatalog.metadata.productId as string, + version: layerFromCatalog.metadata.productVersion as string, + dbId: layerFromCatalog.id, + zoomLevel: 7, + sanitizedBbox: expectedsanitizedBbox, + crs: userInput.crs as string, + }; + + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(undefined); + updateJobMock.mockResolvedValue(undefined); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(undefined); + findPendingJobMock.mockResolvedValue(JSON.parse(JSON.stringify(inProgressJob))); + + await createPackageManager.createPackage(userInput); + + expect(findLayerMock).toHaveBeenCalledWith(layerFromCatalog.id); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledTimes(0); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); + expect(findCompletedJobMock).toHaveBeenCalledTimes(2); + expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findInProgressJobMock).toHaveBeenCalledTimes(1); + expect(findPendingJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findPendingJobMock).toHaveBeenCalledTimes(1); + }); + + it('should return job and task-ids of existing in progress job', async () => { + const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; + const jobDupParams: JobDuplicationParams = { + resourceId: layerFromCatalog.metadata.productId as string, + version: layerFromCatalog.metadata.productVersion as string, + dbId: layerFromCatalog.id, + 
zoomLevel: 7, + sanitizedBbox: expectedsanitizedBbox, + crs: userInput.crs as string, + }; + + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(undefined); + updateJobMock.mockResolvedValue(undefined); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(JSON.parse(JSON.stringify(inProgressJob))); + + const res = await createPackageManager.createPackage(userInput); + const expectedReturn: ICreateJobResponse = { + id: inProgressJob.id, + taskIds: [(inProgressJob.tasks as unknown as IJobResponse[])[0].id], + status: OperationStatus.IN_PROGRESS, + }; + + expect(res).toEqual(expectedReturn); + expect(findLayerMock).toHaveBeenCalledWith(jobDupParams.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledTimes(0); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); + expect(findCompletedJobMock).toHaveBeenCalledTimes(2); + expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findInProgressJobMock).toHaveBeenCalledTimes(1); + expect(findPendingJobMock).toHaveBeenCalledTimes(0); + }); + + it('should increase callbacks array of existing in progress job', async () => { + const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; + const jobDupParams: JobDuplicationParams = { + resourceId: layerFromCatalog.metadata.productId as string, + version: layerFromCatalog.metadata.productVersion as string, + dbId: layerFromCatalog.id, + zoomLevel: 7, sanitizedBbox: expectedsanitizedBbox, - }) - ); + crs: userInput.crs as string, + }; + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = new Date(); + testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); + findLayerMock.mockResolvedValue(layerFromCatalog); + createMock.mockResolvedValue(undefined); + updateJobMock.mockResolvedValue(undefined); + findCompletedJobMock.mockResolvedValue(undefined); + findInProgressJobMock.mockResolvedValue(JSON.parse(JSON.stringify({ ...inProgressJob, expirationDate: testExpirationDate }))); + const jobUpdateParams = { + parameters: { + fileName: 'test.gpkg', + relativeDirectoryPath: 'test', + crs: 'EPSG:4326', + sanitizedBbox: [0, 0, 25, 41], + zoomLevel: 4, + exportVersion: ExportVersion.GETMAP, + callbacks: [ + { url: 'http://localhost:6969', bbox: [0, 0, 25, 41] }, + { url: 'http://new-added-callback-url.com', bbox: [-5, 3, 25, 41] }, + ], + targetResolution: 0.0439453125, + }, + }; + const res = await createPackageManager.createPackage({ ...userInput, callbackURLs: ['http://new-added-callback-url.com'] }); + const expectedReturn: ICreateJobResponse = { + id: inProgressJob.id, + taskIds: [(inProgressJob.tasks as unknown as IJobResponse[])[0].id], + status: OperationStatus.IN_PROGRESS, + }; + + expect(res).toEqual(expectedReturn); + expect(findLayerMock).toHaveBeenCalledWith(jobDupParams.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createMock).toHaveBeenCalledTimes(0); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); + expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); + expect(findCompletedJobMock).toHaveBeenCalledTimes(2); + expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); + expect(findInProgressJobMock).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledWith('fa3ab609-377a-4d96-bf0b-e0bb72f683b8', jobUpdateParams); + 
expect(findPendingJobMock).toHaveBeenCalledTimes(0); + }); + + it('should throw bad request error when requested resolution is higher than the layer resolution', async () => { + const layer = { ...layerFromCatalog, metadata: { ...layerFromCatalog.metadata, maxResolutionDeg: 0.072 } }; + findLayerMock.mockResolvedValue(layer); + + const action = async () => createPackageManager.createPackage(userInput); + + await expect(action).rejects.toThrow(BadRequestError); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findLayerMock).toHaveBeenCalledWith(layer.id); + }); }); - - it('should return job and task-ids of existing in pending job', async () => { - const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; - const jobDupParams: JobDuplicationParams = { - resourceId: layerFromCatalog.metadata.productId as string, - version: layerFromCatalog.metadata.productVersion as string, - dbId: layerFromCatalog.id, - zoomLevel: 7, - sanitizedBbox: expectedsanitizedBbox, - crs: userInput.crs as string, - }; - - findLayerMock.mockResolvedValue(layerFromCatalog); - createMock.mockResolvedValue(undefined); - updateJobMock.mockResolvedValue(undefined); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(undefined); - findPendingJobMock.mockResolvedValue(JSON.parse(JSON.stringify(inProgressJob))); - - await createPackageManager.createPackage(userInput); - - expect(findLayerMock).toHaveBeenCalledWith(layerFromCatalog.id); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(0); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); - expect(findCompletedJobMock).toHaveBeenCalledTimes(2); - expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findInProgressJobMock).toHaveBeenCalledTimes(1); - expect(findPendingJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findPendingJobMock).toHaveBeenCalledTimes(1); + describe('#createMetadata', () => { + it('should create metadata.json file with the correct parameters', async () => { + const fileName = 'file'; + const directoryName = '/tmp/gpkgDir'; + + const mockGgpkgPath = `${directoryName}/${fileName}`; + + findLayerMock.mockResolvedValue(layerFromCatalog); + + await createPackageManager.createJsonMetadata(mockGgpkgPath, completedJob); + + const expectedFileName = `${directoryName}${sep}${fileName}${METADA_JSON_FILE_EXTENSION}`; + const expectedMetadata: LayerMetadata = { + ...layerFromCatalog.metadata, + maxResolutionDeg: completedJob.parameters.targetResolution, + footprint: { + type: 'Feature', + bbox: [0, 0, 25, 41], + properties: {}, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [0, 0], + [25, 0], + [25, 41], + [0, 41], + [0, 0], + ], + ], + }, + }, + }; + + expect(fs.promises.writeFile).toHaveBeenCalledTimes(1); + expect(fs.promises.writeFile).toHaveBeenCalledWith(expectedFileName, JSON.stringify(expectedMetadata)); + }); }); - - it('should return job and task-ids of existing in progress job', async () => { - const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; - const jobDupParams: JobDuplicationParams = { - resourceId: layerFromCatalog.metadata.productId as string, - version: layerFromCatalog.metadata.productVersion as string, - dbId: layerFromCatalog.id, - zoomLevel: 7, - sanitizedBbox: expectedsanitizedBbox, - crs: userInput.crs as string, - }; - - findLayerMock.mockResolvedValue(layerFromCatalog); - 
createMock.mockResolvedValue(undefined); - updateJobMock.mockResolvedValue(undefined); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(JSON.parse(JSON.stringify(inProgressJob))); - - const res = await createPackageManager.createPackage(userInput); - const expectedReturn: ICreateJobResponse = { - id: inProgressJob.id, - taskIds: [(inProgressJob.tasks as unknown as IJobResponse[])[0].id], - status: OperationStatus.IN_PROGRESS, - }; - - expect(res).toEqual(expectedReturn); - expect(findLayerMock).toHaveBeenCalledWith(jobDupParams.dbId); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(0); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); - expect(findCompletedJobMock).toHaveBeenCalledTimes(2); - expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findInProgressJobMock).toHaveBeenCalledTimes(1); - expect(findPendingJobMock).toHaveBeenCalledTimes(0); + }); + describe('ROI', () => { + describe('#createExportMetadata', () => { + it('should create metadata.json file with the correct parameters', async () => { + const gpkgLocation = configMock.get('gpkgsLocation'); + const concatFsPathsSpy = jest.spyOn(utils, 'concatFsPaths'); + const parseFeatureCollectionSpy = jest.spyOn(utils, 'parseFeatureCollection'); + findLayerMock.mockResolvedValue(layerFromCatalogSample); + + await createPackageManager.createExportJsonMetadata({ ...completedExportJob }); + expect(fs.promises.writeFile).toHaveBeenCalledTimes(1); + expect(parseFeatureCollectionSpy).toHaveBeenCalledTimes(1); + expect(parseFeatureCollectionSpy).toHaveBeenCalledWith(completedExportJob.parameters.roi); + expect(concatFsPathsSpy).toHaveBeenCalledTimes(1); + expect(concatFsPathsSpy).toHaveBeenCalledWith( + gpkgLocation, + completedExportJob.parameters.relativeDirectoryPath, + completedExportJob.parameters.fileNamesTemplates.metadataURI + ); + const expectedFileName = utils.concatFsPaths( + gpkgLocation, + completedExportJob.parameters.relativeDirectoryPath, + completedExportJob.parameters.fileNamesTemplates.metadataURI + ); + expect(fs.promises.writeFile).toHaveBeenCalledWith(expectedFileName, JSON.stringify(metadataExportJson)); + }); + + it('should fail on metadata.json creation(because finding layer from catalog)', async () => { + const concatFsPathsSpy = jest.spyOn(utils, 'concatFsPaths'); + const parseFeatureCollectionSpy = jest.spyOn(utils, 'parseFeatureCollection'); + findLayerMock.mockRejectedValue({ msg: 'Layer Not found' }); + const action = async () => createPackageManager.createExportJsonMetadata(completedExportJob); + await expect(action()).rejects.toStrictEqual({ msg: 'Layer Not found' }); + expect(parseFeatureCollectionSpy).toHaveBeenCalledTimes(0); + expect(concatFsPathsSpy).toHaveBeenCalledTimes(0); + expect(fs.promises.writeFile).toHaveBeenCalledTimes(0); + }); }); + describe('#create', () => { + it('should create job and return its job and task ids', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + roi: fc1, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + }; - it('should increase callbacks array of existing in progress job', async () => { - const expectedsanitizedBbox: BBox = [0, 2.8125, 25.3125, 42.1875]; - const jobDupParams: JobDuplicationParams = { - resourceId: layerFromCatalog.metadata.productId as string, - version: layerFromCatalog.metadata.productVersion as string, - dbId: 
layerFromCatalog.id, - zoomLevel: 7, - sanitizedBbox: expectedsanitizedBbox, - crs: userInput.crs as string, - }; - const expirationDays: number = configMock.get('jobManager.expirationDays'); - const testExpirationDate = new Date(); - testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); - findLayerMock.mockResolvedValue(layerFromCatalog); - createMock.mockResolvedValue(undefined); - updateJobMock.mockResolvedValue(undefined); - findCompletedJobMock.mockResolvedValue(undefined); - findInProgressJobMock.mockResolvedValue(JSON.parse(JSON.stringify({ ...inProgressJob, expirationDate: testExpirationDate }))); - const jobUpdateParams = { - parameters: { - fileName: 'test.gpkg', - relativeDirectoryPath: 'test', + const jobDupParams: JobExportDuplicationParams = { + resourceId: 'string', + version: '1.0', + dbId: pycswRecord.id, + roi: fc1, crs: 'EPSG:4326', - sanitizedBbox: [0, 0, 25, 41], - zoomLevel: 4, - callbacks: [ - { url: 'http://localhost:6969', bbox: [0, 0, 25, 41] }, - { url: 'http://new-added-callback-url.com', bbox: [-5, 3, 25, 41] }, - ], - targetResolution: 0.0439453125, - }, - }; - const res = await createPackageManager.createPackage({ ...userInput, callbackURLs: ['http://new-added-callback-url.com'] }); - const expectedReturn: ICreateJobResponse = { - id: inProgressJob.id, - taskIds: [(inProgressJob.tasks as unknown as IJobResponse[])[0].id], - status: OperationStatus.IN_PROGRESS, - }; - - expect(res).toEqual(expectedReturn); - expect(findLayerMock).toHaveBeenCalledWith(jobDupParams.dbId); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(createMock).toHaveBeenCalledTimes(0); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(1, jobDupParams); - expect(findCompletedJobMock).toHaveBeenNthCalledWith(2, jobDupParams); - expect(findCompletedJobMock).toHaveBeenCalledTimes(2); - expect(findInProgressJobMock).toHaveBeenCalledWith(jobDupParams); - expect(findInProgressJobMock).toHaveBeenCalledTimes(1); - expect(updateJobMock).toHaveBeenCalledWith('fa3ab609-377a-4d96-bf0b-e0bb72f683b8', jobUpdateParams); - expect(findPendingJobMock).toHaveBeenCalledTimes(0); - }); + }; + + const expectedCreateJobResponse = { + jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', + taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], + status: OperationStatus.IN_PROGRESS, + }; + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + const generateExportFileNamesSpy = jest.spyOn( + CreatePackageManager.prototype as unknown as { generateExportFileNames: jest.Mock }, + 'generateExportFileNames' + ); + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + createExportMock.mockResolvedValue(expectedCreateJobResponse); + findExportJobMock.mockResolvedValue(undefined); + + validateFreeSpaceSpy.mockResolvedValue(true); + const res = await createPackageManager.createPackageRoi(req); + + expect(res).toEqual(expectedCreateJobResponse); + expect(findLayerMock).toHaveBeenCalledWith(req.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createExportMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(3); + expect(findExportJobMock.mock.calls[0]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(findExportJobMock.mock.calls[1]).toEqual([OperationStatus.IN_PROGRESS, jobDupParams, true]); + expect(findExportJobMock.mock.calls[2]).toEqual([OperationStatus.PENDING, jobDupParams, true]); 
+ expect(generateExportFileNamesSpy).toHaveBeenCalledTimes(1); + expect(generateExportFileNamesSpy).toHaveBeenCalledWith( + testPycswRecord.metadata.productType, + testPycswRecord.metadata.productId, + testPycswRecord.metadata.productVersion, + featuresRecordsSampleFc1 + ); + expect(res).toBe(expectedCreateJobResponse); + }); + + it(`should create job and take original layer's resolution and footprint as ROI`, async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + }; + + const expectedCreateJobResponse = { + jobId: '09e29fa8-7283-4334-b3a4-99f75922de59', + taskIds: ['66aa1e2e-784c-4178-b5a0-af962937d561'], + status: OperationStatus.IN_PROGRESS, + }; + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + createExportMock.mockResolvedValue(expectedCreateJobResponse); + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + findExportJobMock.mockResolvedValue(undefined); + + validateFreeSpaceSpy.mockResolvedValue(true); + + const res = await createPackageManager.createPackageRoi(req); + expect(res).toEqual(expectedCreateJobResponse); + expect(findLayerMock).toHaveBeenCalledWith(req.dbId); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(createExportMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(3); + expect(createExportMock).toHaveBeenCalledWith( + expect.objectContaining({ + roi: layerMetadataRoi, + }) + ); + }); + + it('should return callbackParam(webhook) for existing completed job', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + roi: fc1, + }; + const jobDupParams: JobExportDuplicationParams = { + resourceId: pycswRecord.metadata.productId as string, + version: pycswRecord.metadata.productVersion as string, + dbId: pycswRecord.id, + crs: userInput.crs as string, + roi: fc1, + }; + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + createExportMock.mockResolvedValue(undefined); + findExportJobMock.mockResolvedValue(JSON.parse(JSON.stringify(completedExportJob))); + updateJobMock.mockResolvedValue(undefined); + + const res = await createPackageManager.createPackageRoi(req); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock.mock.calls[0]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(createExportMock).toHaveBeenCalledTimes(0); + expect(JSON.stringify(res)).toStrictEqual( + JSON.stringify({ ...completedExportJob.parameters.callbackParams, status: OperationStatus.COMPLETED }) + ); + }); + + it('should return job and task-ids of existing pending job', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + roi: fc1, + }; + const jobDupParams: JobExportDuplicationParams = { + resourceId: pycswRecord.metadata.productId as string, + version: pycswRecord.metadata.productVersion as string, + dbId: pycswRecord.id, + crs: userInput.crs as string, + roi: fc1, + }; + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + createExportMock.mockResolvedValue(undefined); + 
findExportJobMock.mockResolvedValueOnce(undefined); + findExportJobMock.mockResolvedValueOnce(undefined); + findExportJobMock.mockResolvedValueOnce(JSON.parse(JSON.stringify(inProgressExportJob))); + findExportJobMock.mockResolvedValueOnce(undefined); + updateJobMock.mockResolvedValue(undefined); + + const res = await createPackageManager.createPackageRoi(req); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(4); + expect(findExportJobMock.mock.calls[0]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(findExportJobMock.mock.calls[1]).toEqual([OperationStatus.IN_PROGRESS, jobDupParams, true]); + expect(findExportJobMock.mock.calls[2]).toEqual([OperationStatus.PENDING, jobDupParams, true]); + expect(findExportJobMock.mock.calls[3]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(createExportMock).toHaveBeenCalledTimes(0); + expect(res).toStrictEqual({ + id: inProgressExportJob.id, + taskIds: ['1f765695-338b-4752-b182-a8cbae3c610e'], + status: OperationStatus.IN_PROGRESS, + }); + }); + + it('should return job and task-ids of existing in progress job', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + roi: fc1, + }; + const jobDupParams: JobExportDuplicationParams = { + resourceId: pycswRecord.metadata.productId as string, + version: pycswRecord.metadata.productVersion as string, + dbId: pycswRecord.id, + crs: userInput.crs as string, + roi: fc1, + }; + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + createExportMock.mockResolvedValue(undefined); + findExportJobMock.mockResolvedValueOnce(undefined); + findExportJobMock.mockResolvedValueOnce(JSON.parse(JSON.stringify(inProgressExportJob))); + findExportJobMock.mockResolvedValueOnce(undefined); + updateJobMock.mockResolvedValue(undefined); + + const res = await createPackageManager.createPackageRoi(req); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(3); + expect(findExportJobMock.mock.calls[0]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(findExportJobMock.mock.calls[1]).toEqual([OperationStatus.IN_PROGRESS, jobDupParams, true]); + expect(findExportJobMock.mock.calls[2]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(createExportMock).toHaveBeenCalledTimes(0); + expect(res).toStrictEqual({ + id: inProgressExportJob.id, + taskIds: ['1f765695-338b-4752-b182-a8cbae3c610e'], + status: OperationStatus.IN_PROGRESS, + }); + }); + + it('should increase callbacks array of existing in progress job', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['http://new-added-callback-url.com'], + crs: 'EPSG:4326', + roi: fc1, + }; + + const jobDupParams: JobExportDuplicationParams = { + resourceId: pycswRecord.metadata.productId as string, + version: pycswRecord.metadata.productVersion as string, + dbId: pycswRecord.id, + crs: userInput.crs as string, + roi: fc1, + }; + const expirationDays: number = configMock.get('jobManager.expirationDays'); + const testExpirationDate = new Date(); + testExpirationDate.setDate(testExpirationDate.getDate() - expirationDays); + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + 
createExportMock.mockResolvedValue(undefined); + updateJobMock.mockResolvedValue(undefined); + findExportJobMock.mockResolvedValueOnce(undefined); + findExportJobMock.mockResolvedValueOnce(JSON.parse(JSON.stringify({ ...inProgressExportJob, expirationDate: testExpirationDate }))); + findExportJobMock.mockResolvedValueOnce(undefined); + const jobUpdateParams = { + parameters: { + crs: 'EPSG:4326', + exportVersion: ExportVersion.ROI, + callbacks: [ + { url: 'http://localhost:6969', roi: fc1 }, + { url: 'http://new-added-callback-url.com', roi: fc1 }, + ], + roi: fc1, + fileNamesTemplates: inProgressExportJob.parameters.fileNamesTemplates, + gpkgEstimatedSize: inProgressExportJob.parameters.gpkgEstimatedSize, + relativeDirectoryPath: inProgressExportJob.parameters.relativeDirectoryPath, + }, + }; + const res = await createPackageManager.createPackageRoi({ ...req, callbackURLs: ['http://new-added-callback-url.com'] }); + const expectedReturn: ICreateJobResponse = { + id: inProgressExportJob.id, + taskIds: ['1f765695-338b-4752-b182-a8cbae3c610e'], + status: OperationStatus.IN_PROGRESS, + }; + + expect(res).toEqual(expectedReturn); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findExportJobMock).toHaveBeenCalledTimes(3); + expect(findExportJobMock.mock.calls[0]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(findExportJobMock.mock.calls[1]).toEqual([OperationStatus.IN_PROGRESS, jobDupParams, true]); + expect(findExportJobMock.mock.calls[2]).toEqual([OperationStatus.COMPLETED, jobDupParams]); + expect(createExportMock).toHaveBeenCalledTimes(0); + expect(updateJobMock).toHaveBeenCalledWith(inProgressExportJob.id, jobUpdateParams); + }); + + it('should throw bad request error when requested feature is without intersection with layer geometry', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['http://new-added-callback-url.com'], + crs: 'EPSG:4326', + roi: fcNoIntersection, + }; - it('should throw bad request error when requested resolution is higher than the layer resolution', async () => { - const layer = { ...layerFromCatalog, metadata: { ...layerFromCatalog.metadata, maxResolutionDeg: 0.072 } }; - findLayerMock.mockResolvedValue(layer); + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + findLayerMock.mockResolvedValue({ ...testPycswRecord }); - const action = async () => createPackageManager.createPackage(userInput); + const action = async () => createPackageManager.createPackageRoi(req); - await expect(action).rejects.toThrow(BadRequestError); - expect(findLayerMock).toHaveBeenCalledTimes(1); - expect(findLayerMock).toHaveBeenCalledWith(layer.id); - }); - }); + await expect(action).rejects.toThrow(BadRequestError); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + }); - it('should create metada.json file with the correct parameters', async () => { - const fileName = 'file'; - const directoryName = '/tmp/gpkgDir'; + it('should throw bad request error when requested feature maxDegResolution is higher than the layer resolution', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + callbackURLs: ['http://new-added-callback-url.com'], + crs: 'EPSG:4326', + roi: fcTooHighResolution, + }; - const mockGgpkgPath = `${directoryName}/${fileName}`; + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + findLayerMock.mockResolvedValue({ ...testPycswRecord }); - 
findLayerMock.mockResolvedValue(layerFromCatalog); + const action = async () => createPackageManager.createPackageRoi(req); - await createPackageManager.createJsonMetadata(mockGgpkgPath, completedJob); + await expect(action).rejects.toThrow(BadRequestError); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + }); - const expectedFileName = `${directoryName}${sep}${fileName}${METADA_JSON_FILE_EXTENSION}`; - const expectedMetadata: LayerMetadata = { - ...layerFromCatalog.metadata, - maxResolutionDeg: completedJob.parameters.targetResolution, - footprint: { - type: 'Feature', - bbox: [0, 0, 25, 41], - properties: {}, - geometry: { - type: 'Polygon', - coordinates: [ - [ - [0, 0], - [25, 0], - [25, 41], - [0, 41], - [0, 0], - ], - ], - }, - }, - }; + it('should throw bad request error when requested gpkg estimated size is larger than storage', async () => { + const req: ICreatePackageRoi = { + dbId: pycswRecord.id, + roi: fc1, + callbackURLs: ['testUrl'], + crs: 'EPSG:4326', + }; - expect(fs.promises.writeFile).toHaveBeenCalledTimes(1); - expect(fs.promises.writeFile).toHaveBeenCalledWith(expectedFileName, JSON.stringify(expectedMetadata)); + const validateFreeSpaceSpy = jest.spyOn(CreatePackageManager.prototype as unknown as { validateFreeSpace: jest.Mock }, 'validateFreeSpace'); + + const testPycswRecord = JSON.parse(JSON.stringify(pycswRecord)); + findLayerMock.mockResolvedValue({ ...testPycswRecord }); + findExportJobMock.mockResolvedValue(undefined); + validateFreeSpaceSpy.mockResolvedValue(false); + + const action = async () => createPackageManager.createPackageRoi(req); + await expect(action).rejects.toThrow(InsufficientStorage); + expect(findLayerMock).toHaveBeenCalledTimes(1); + expect(findLayerMock).toHaveBeenCalledWith(pycswRecord.id); + expect(createExportMock).toHaveBeenCalledTimes(0); + }); + }); }); }); diff --git a/tests/unit/createPackage/models/tasksModel.spec.ts b/tests/unit/createPackage/models/tasksModel.spec.ts index 347c0ec..8582e45 100644 --- a/tests/unit/createPackage/models/tasksModel.spec.ts +++ b/tests/unit/createPackage/models/tasksModel.spec.ts @@ -1,14 +1,27 @@ import { sep } from 'path'; import jsLogger from '@map-colonies/js-logger'; -import { OperationStatus } from '@map-colonies/mc-priority-queue'; +import { IUpdateJobBody, OperationStatus } from '@map-colonies/mc-priority-queue'; import { NotFoundError } from '@map-colonies/error-types'; import { ITaskStatusResponse, TasksManager } from '../../../../src/tasks/models/tasksManager'; -import { ICallbackDataBase, ITaskParameters, JobResponse, TaskResponse } from '../../../../src/common/interfaces'; -import { registerDefaultConfig } from '../../../mocks/config'; +import { + ICallbackDataBase, + ICallbackDataExportBase, + IJobExportParameters, + ITaskParameters, + JobExportResponse, + JobResponse, + TaskResponse, +} from '../../../../src/common/interfaces'; +import { configMock, registerDefaultConfig } from '../../../mocks/config'; import { callbackClientMock, sendMock } from '../../../mocks/clients/callbackClient'; -import { createJsonMetadataMock, packageManagerMock } from '../../../mocks/clients/packageManager'; -import { jobManagerWrapperMock, getInProgressJobsMock as getInProgressJobsMock, updateJobMock } from '../../../mocks/clients/jobManagerWrapper'; -import { mockJob } from '../../../mocks/data/mockJob'; +import { createExportJsonMetadataMock, createJsonMetadataMock, packageManagerMock } from '../../../mocks/clients/packageManager'; +import { + 
jobManagerWrapperMock, + getInProgressJobsMock as getInProgressJobsMock, + updateJobMock, + getExportJobsMock, +} from '../../../mocks/clients/jobManagerWrapper'; +import { mockCompletedJob, mockJob } from '../../../mocks/data/mockJob'; import * as utils from '../../../../src/common/utils'; let tasksManager: TasksManager; @@ -25,238 +38,546 @@ describe('TasksManager', () => { jest.resetAllMocks(); jest.restoreAllMocks(); }); - - describe('#getTaskStatusByJobId', () => { - it('should throw NotFoundError when jobId is not exists', async () => { - const emptyTasksResponse: TaskResponse[] = []; - - getTasksByJobIdStub = jest.fn(); - jobManagerWrapperMock.getTasksByJobId = getTasksByJobIdStub.mockResolvedValue(emptyTasksResponse); - - const action = async () => tasksManager.getTaskStatusByJobId('09e29fa8-7283-4334-b3a4-99f75922de59'); - - await expect(action).rejects.toThrow(NotFoundError); - expect(getTasksByJobIdStub).toHaveBeenCalledTimes(1); - }); - - it('should successfully return task status by jobId', async () => { - const tasksResponse: TaskResponse[] = [ - { - id: '0a5552f7-01eb-40af-a912-eed8fa9e1561', - jobId: '0a5552f7-01eb-40af-a912-eed8fa9e1568', - type: 'rasterTilesExporterd', - description: '', - parameters: {} as unknown as ITaskParameters, - status: OperationStatus.IN_PROGRESS, - percentage: 23, - reason: '', - attempts: 0, - resettable: true, - created: '2022-08-02T13:02:18.475Z', - updated: '2022-08-02T15:01:56.658Z', - }, - ]; - - getTasksByJobIdStub = jest.fn(); - jobManagerWrapperMock.getTasksByJobId = getTasksByJobIdStub.mockResolvedValue(tasksResponse); - - const result = tasksManager.getTaskStatusByJobId('0a5552f7-01eb-40af-a912-eed8fa9e1568'); - const expectedResult: ITaskStatusResponse = { - percentage: tasksResponse[0].percentage, - status: tasksResponse[0].status, - }; - await expect(result).resolves.not.toThrow(); - await expect(result).resolves.toEqual(expectedResult); - expect(getTasksByJobIdStub).toHaveBeenCalledTimes(1); - }); - }); - describe('#getJobsByTaskStatus', () => { - it('should return completed job with no failed jobs', async () => { - const jobs: JobResponse[] = []; - const completedMockJob = { ...mockJob, completedTasks: 1 }; - jobs.push(completedMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(1); - expect(jobsStatus.failedJobs?.length).toBe(0); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); - }); - - it('should return failed job with no completed jobs', async () => { - const jobs: JobResponse[] = []; - const failedMockJob = { ...mockJob, failedTasks: 1 }; - jobs.push(failedMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(0); - expect(jobsStatus.failedJobs?.length).toBe(1); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); - }); - - it('should return completed job and failed job', async () => { - const jobs: JobResponse[] = []; - const completedMockJob = { ...mockJob, completedTasks: 1 }; - const failedMockJob = { ...mockJob, failedTasks: 1 }; - jobs.push(completedMockJob, failedMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(1); - expect(jobsStatus.failedJobs?.length).toBe(1); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); - }); - - it('should 
return an empty jobs response if task is in progress', async () => { - const jobs: JobResponse[] = []; - - const inProgressMockJob = { ...mockJob, inProgressTasks: 1 }; - jobs.push(inProgressMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(0); - expect(jobsStatus.failedJobs?.length).toBe(0); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); - }); - - it('should return an empty jobs response if task is in pending', async () => { - const jobs: JobResponse[] = []; - const pendingMockJob = { ...mockJob, pendingTasks: 1 }; - jobs.push(pendingMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(0); - expect(jobsStatus.failedJobs?.length).toBe(0); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + describe('GetMap', () => { + /** + * @deprecated GetMap API - will be deprecated on future + */ + describe('#getJobsByTaskStatus', () => { + it('should return completed job with no failed jobs', async () => { + const jobs: JobResponse[] = []; + const completedMockJob = { ...mockJob, completedTasks: 1 }; + jobs.push(completedMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(1); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return failed job with no completed jobs', async () => { + const jobs: JobResponse[] = []; + const failedMockJob = { ...mockJob, failedTasks: 1 }; + jobs.push(failedMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(1); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return completed job and failed job', async () => { + const jobs: JobResponse[] = []; + const completedMockJob = { ...mockJob, completedTasks: 1 }; + const failedMockJob = { ...mockJob, failedTasks: 1 }; + jobs.push(completedMockJob, failedMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(1); + expect(jobsStatus.failedJobs?.length).toBe(1); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in progress', async () => { + const jobs: JobResponse[] = []; + + const inProgressMockJob = { ...mockJob, inProgressTasks: 1 }; + jobs.push(inProgressMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in pending', async () => { + const jobs: JobResponse[] = []; + const pendingMockJob = { ...mockJob, pendingTasks: 1 }; + jobs.push(pendingMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + 
expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in expired', async () => { + const jobs: JobResponse[] = []; + const expiredMockJob = { ...mockJob, expiredTasks: 1 }; + jobs.push(expiredMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in aborted', async () => { + const jobs: JobResponse[] = []; + const abortedMockJob = { ...mockJob, abortedTasks: 1 }; + jobs.push(abortedMockJob); + getInProgressJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + }); }); - it('should return an empty jobs response if task is in expired', async () => { - const jobs: JobResponse[] = []; - const expiredMockJob = { ...mockJob, expiredTasks: 1 }; - jobs.push(expiredMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(0); - expect(jobsStatus.failedJobs?.length).toBe(0); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + /** + * @deprecated GetMap API - will be deprecated on future + */ + describe('#sendCallbacks', () => { + it('should return callback data with the expected params for success jobs', async () => { + sendMock.mockResolvedValue(200); + const getFileSizeSpy = jest.spyOn(utils, 'getFileSize'); + getFileSizeSpy.mockResolvedValue(2000); + const expirationTime = new Date(); + const expectedCallbackData: ICallbackDataBase = { + fileUri: `http://download-service/downloads/test${sep}test.gpkg`, + expirationTime: expirationTime, + fileSize: 2000, + dbId: '880a9316-0f10-4874-92e2-a62d587a1169', + packageName: 'test.gpkg', + requestId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f', + targetResolution: 0.072, + success: true, + errorReason: undefined, + }; + + const callbackData = await tasksManager.sendCallbacks(mockJob, expirationTime); + expect(callbackData).toEqual(expectedCallbackData); + }); + + it('should return callback data with the expected params for failed jobs', async () => { + sendMock.mockResolvedValue(200); + const getFileSizeSpy = jest.spyOn(utils, 'getFileSize'); + getFileSizeSpy.mockResolvedValue(2000); + const expirationTime = new Date(); + const errMessage = 'gpkg failed to create'; + const expectedCallbackData: ICallbackDataBase = { + fileUri: '', + expirationTime: expirationTime, + fileSize: 0, + dbId: '880a9316-0f10-4874-92e2-a62d587a1169', + packageName: 'test.gpkg', + requestId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f', + targetResolution: 0.072, + success: false, + errorReason: errMessage, + }; + const callbackData = await tasksManager.sendCallbacks(mockJob, expirationTime, errMessage); + expect(callbackData).toEqual(expectedCallbackData); + }); + + it('should return callback data even if callback response got rejected', async () => { + sendMock.mockRejectedValue({}); + const expirationTime = new Date(); + + const action = async () => tasksManager.sendCallbacks(mockJob, expirationTime); + await expect(action()).resolves.not.toThrow(); + }); }); - it('should return an empty jobs response if task is in 
aborted', async () => { - const jobs: JobResponse[] = []; - const abortedMockJob = { ...mockJob, abortedTasks: 1 }; - jobs.push(abortedMockJob); - getInProgressJobsMock.mockResolvedValue(jobs); - - const jobsStatus = await tasksManager.getJobsByTaskStatus(); - - expect(jobsStatus.completedJobs?.length).toBe(0); - expect(jobsStatus.failedJobs?.length).toBe(0); - expect(getInProgressJobsMock).toHaveBeenCalledTimes(1); + describe('#finalizeJob', () => { + let sendCallbacksSpy: jest.SpyInstance; + + it('should successfully finalize a job with status completed', async () => { + const expirationTime = new Date(); + createJsonMetadataMock.mockResolvedValue({}); + updateJobMock.mockResolvedValue({}); + sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks'); + + const action = async () => tasksManager.finalizeJob(mockJob, expirationTime); + await expect(action()).resolves.not.toThrow(); + expect(createJsonMetadataMock).toHaveBeenCalledTimes(1); + expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledTimes(1); + }); + + it('should successfully finalize a job with status failed due to error while create json metadata file', async () => { + const expirationTime = new Date(); + createJsonMetadataMock.mockRejectedValue({}); + updateJobMock.mockResolvedValue({}); + sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks'); + + const action = async () => tasksManager.finalizeJob(mockJob, expirationTime); + await expect(action()).resolves.not.toThrow(); + expect(createJsonMetadataMock).toHaveBeenCalledTimes(1); + expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledTimes(1); + }); + + it('should successfully finalize a job with job status failed without create json metadata file due to failed in task', async () => { + const expirationTime = new Date(); + updateJobMock.mockResolvedValue({}); + sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks'); + + const errMessage = 'gpkg failed to create'; + const action = async () => tasksManager.finalizeJob(mockJob, expirationTime, false, errMessage); + await expect(action()).resolves.not.toThrow(); + expect(createJsonMetadataMock).toHaveBeenCalledTimes(0); + expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledTimes(1); + }); }); }); - - describe('#sendCallbacks', () => { - it('should return callback data with the expected params for success jobs', async () => { - sendMock.mockResolvedValue(200); - const getFileSizeSpy = jest.spyOn(utils, 'getFileSize'); - getFileSizeSpy.mockResolvedValue(2000); - const expirationTime = new Date(); - const expectedCallbackData: ICallbackDataBase = { - fileUri: `http://download-service/downloads/test${sep}test.gpkg`, - expirationTime: expirationTime, - fileSize: 2000, - dbId: '880a9316-0f10-4874-92e2-a62d587a1169', - packageName: 'test.gpkg', - requestId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f', - targetResolution: 0.072, - success: true, - errorReason: undefined, - }; - - const callbackData = await tasksManager.sendCallbacks(mockJob, expirationTime); - expect(callbackData).toEqual(expectedCallbackData); - }); - - it('should return callback data with the expected params for failed jobs', async () => { - sendMock.mockResolvedValue(200); - const getFileSizeSpy = jest.spyOn(utils, 'getFileSize'); - getFileSizeSpy.mockResolvedValue(2000); - const expirationTime = new Date(); - const errMessage = 'gpkg failed to create'; - const expectedCallbackData: ICallbackDataBase = { - fileUri: '', - expirationTime: 
expirationTime,
- fileSize: 0,
- dbId: '880a9316-0f10-4874-92e2-a62d587a1169',
- packageName: 'test.gpkg',
- requestId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f',
- targetResolution: 0.072,
- success: false,
- errorReason: errMessage,
- };
- const callbackData = await tasksManager.sendCallbacks(mockJob, expirationTime, errMessage);
- expect(callbackData).toEqual(expectedCallbackData);
+ describe('ROI', () => {
+ describe('#getTaskStatusByJobId', () => {
+ it('should throw NotFoundError when jobId does not exist', async () => {
+ const emptyTasksResponse: TaskResponse[] = [];
+
+ getTasksByJobIdStub = jest.fn();
+ jobManagerWrapperMock.getTasksByJobId = getTasksByJobIdStub.mockResolvedValue(emptyTasksResponse);
+
+ const action = async () => tasksManager.getTaskStatusByJobId('09e29fa8-7283-4334-b3a4-99f75922de59');
+
+ await expect(action).rejects.toThrow(NotFoundError);
+ expect(getTasksByJobIdStub).toHaveBeenCalledTimes(1);
+ });
+
+ it('should successfully return task status by jobId', async () => {
+ const tasksResponse: TaskResponse[] = [
+ {
+ id: '0a5552f7-01eb-40af-a912-eed8fa9e1561',
+ jobId: '0a5552f7-01eb-40af-a912-eed8fa9e1568',
+ type: 'rasterTilesExporterd',
+ description: '',
+ parameters: {} as unknown as ITaskParameters,
+ status: OperationStatus.IN_PROGRESS,
+ percentage: 23,
+ reason: '',
+ attempts: 0,
+ resettable: true,
+ created: '2022-08-02T13:02:18.475Z',
+ updated: '2022-08-02T15:01:56.658Z',
+ },
+ ];
+
+ getTasksByJobIdStub = jest.fn();
+ jobManagerWrapperMock.getTasksByJobId = getTasksByJobIdStub.mockResolvedValue(tasksResponse);
+
+ const result = tasksManager.getTaskStatusByJobId('0a5552f7-01eb-40af-a912-eed8fa9e1568');
+ const expectedResult: ITaskStatusResponse = {
+ percentage: tasksResponse[0].percentage,
+ status: tasksResponse[0].status,
+ };
+ await expect(result).resolves.not.toThrow();
+ await expect(result).resolves.toEqual(expectedResult);
+ expect(getTasksByJobIdStub).toHaveBeenCalledTimes(1);
+ });
});
- it('should return callback data even if callback response got rejected', async () => {
- sendMock.mockRejectedValue({});
- const expirationTime = new Date();
-
- const action = async () => tasksManager.sendCallbacks(mockJob, expirationTime);
- await expect(action()).resolves.not.toThrow();
- });
- });
-
- describe('#finalizeJob', () => {
- let sendCallbacksSpy: jest.SpyInstance;
-
- it('should successfuly finalize a job with status completed', async () => {
- const expirationTime = new Date();
- createJsonMetadataMock.mockResolvedValue({});
- updateJobMock.mockResolvedValue({});
- sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks');
-
- const action = async () => tasksManager.finalizeJob(mockJob, expirationTime);
- await expect(action()).resolves.not.toThrow();
- expect(createJsonMetadataMock).toHaveBeenCalledTimes(1);
- expect(sendCallbacksSpy).toHaveBeenCalledTimes(1);
- expect(updateJobMock).toHaveBeenCalledTimes(1);
+ describe('#getExportJobsByTaskStatus', () => {
+ it('should return completed job with no failed jobs', async () => {
+ const jobs: JobExportResponse[] = [];
+ const completedMockJob = { ...mockCompletedJob, completedTasks: 1 };
+ jobs.push(completedMockJob);
+ getExportJobsMock.mockResolvedValue(jobs);
+
+ const jobsStatus = await tasksManager.getExportJobsByTaskStatus();
+
+ expect(jobsStatus.completedJobs?.length).toBe(1);
+ expect(jobsStatus.failedJobs?.length).toBe(0);
+ expect(getExportJobsMock).toHaveBeenCalledTimes(1);
+ });
+
+ it('should return failed job with no completed jobs', async () => {
+ const jobs: 
JobExportResponse[] = []; + const failedMockJob = { ...mockCompletedJob, failedTasks: 1 }; + jobs.push(failedMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(1); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return completed job and failed job', async () => { + const jobs: JobExportResponse[] = []; + const completedMockJob = { ...mockCompletedJob, completedTasks: 1 }; + const failedMockJob = { ...mockCompletedJob, failedTasks: 1 }; + jobs.push(completedMockJob, failedMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(1); + expect(jobsStatus.failedJobs?.length).toBe(1); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in progress', async () => { + const jobs: JobExportResponse[] = []; + + const inProgressMockJob = { ...mockCompletedJob, inProgressTasks: 1 }; + jobs.push(inProgressMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in pending', async () => { + const jobs: JobExportResponse[] = []; + const pendingMockJob = { ...mockCompletedJob, pendingTasks: 1 }; + jobs.push(pendingMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in expired', async () => { + const jobs: JobExportResponse[] = []; + const expiredMockJob = { ...mockCompletedJob, expiredTasks: 1 }; + jobs.push(expiredMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); + + it('should return an empty jobs response if task is in aborted', async () => { + const jobs: JobExportResponse[] = []; + const abortedMockJob = { ...mockCompletedJob, abortedTasks: 1 }; + jobs.push(abortedMockJob); + getExportJobsMock.mockResolvedValue(jobs); + + const jobsStatus = await tasksManager.getExportJobsByTaskStatus(); + + expect(jobsStatus.completedJobs?.length).toBe(0); + expect(jobsStatus.failedJobs?.length).toBe(0); + expect(getExportJobsMock).toHaveBeenCalledTimes(1); + }); }); - it('should successfuly finalize a job with status failed due to error while create json metadata file', async () => { - const expirationTime = new Date(); - createJsonMetadataMock.mockRejectedValue({}); - updateJobMock.mockResolvedValue({}); - sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks'); - - const action = async () => tasksManager.finalizeJob(mockJob, expirationTime); - await expect(action()).resolves.not.toThrow(); - expect(createJsonMetadataMock).toHaveBeenCalledTimes(1); - expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); - 
expect(updateJobMock).toHaveBeenCalledTimes(1);
+ describe('#sendExportCallbacks', () => {
+ it('should send callback data with the expected params for success jobs to all clients', async () => {
+ sendMock.mockResolvedValue(200);
+ const expirationTime = new Date();
+ const callbackData: ICallbackDataExportBase = {
+ links: {
+ dataURI: `http://download-service/downloads/test${sep}test.gpkg`,
+ metadataURI: `http://download-service/downloads/test${sep}test.json`,
+ },
+ recordCatalogId: '880a9316-0f10-4874-92e2-a62d587a1169',
+ requestJobId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f',
+ expirationTime: expirationTime,
+ fileSize: 2000,
+ errorReason: undefined,
+ };
+
+ const actualCallBackUrls = mockCompletedJob.parameters.callbacks.map((callback) => callback.url);
+
+ await tasksManager.sendExportCallbacks(mockCompletedJob, callbackData);
+ expect(sendMock).toHaveBeenCalledTimes(2);
+ expect(sendMock.mock.calls).toHaveLength(mockCompletedJob.parameters.callbacks.length);
+ const receivedCallbacks = sendMock.mock.calls;
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-return
+ const urlsArr = receivedCallbacks.map((call) => call[0]);
+ expect(urlsArr).toEqual(actualCallBackUrls);
+ });
+
+ it('should not throw even if callback response got rejected', async () => {
+ sendMock.mockRejectedValue({});
+ const expirationTime = new Date();
+ const callbackData: ICallbackDataExportBase = {
+ links: {
+ dataURI: `http://download-service/downloads/test${sep}test.gpkg`,
+ metadataURI: `http://download-service/downloads/test${sep}test.json`,
+ },
+ recordCatalogId: '880a9316-0f10-4874-92e2-a62d587a1169',
+ requestJobId: 'b729f0e0-af64-4c2c-ba4e-e799e2f3df0f',
+ expirationTime: expirationTime,
+ fileSize: 2000,
+ errorReason: undefined,
+ };
+ const action = async () => tasksManager.sendExportCallbacks(mockCompletedJob, callbackData);
+ await expect(action()).resolves.not.toThrow();
+ expect(sendMock).toHaveBeenCalledTimes(2);
+ });
+ });
- it('should successfuly finalize a job with job status failed without create json metadata file due to failed in task', async () => {
- const expirationTime = new Date();
- updateJobMock.mockResolvedValue({});
- sendCallbacksSpy = jest.spyOn(tasksManager, 'sendCallbacks');
-
- const errMessage = 'gpkg failed to create';
- const action = async () => tasksManager.finalizeJob(mockJob, expirationTime, false, errMessage);
- await expect(action()).resolves.not.toThrow();
- expect(createJsonMetadataMock).toHaveBeenCalledTimes(0);
- expect(sendCallbacksSpy).toHaveBeenCalledTimes(1);
- expect(updateJobMock).toHaveBeenCalledTimes(1);
+ describe('#finalizeExportJob', () => {
+ let sendCallbacksSpy: jest.SpyInstance;
+
+ it('should successfully finalize a job with status completed', async () => {
+ const downloadUrl = configMock.get('downloadServerUrl');
+ const getFileSizeSpy = jest.spyOn(utils, 'getFileSize');
+ getFileSizeSpy.mockResolvedValue(2000);
+ const expirationTime = new Date();
+ createExportJsonMetadataMock.mockResolvedValue({});
+ updateJobMock.mockResolvedValue({});
+ sendCallbacksSpy = jest.spyOn(tasksManager, 'sendExportCallbacks');
+
+ const expectedCallbackParamData: ICallbackDataExportBase = {
+ expirationTime,
+ fileSize: 2000,
+ links: {
+ dataURI: `${downloadUrl}/downloads/${mockCompletedJob.parameters.relativeDirectoryPath}/${mockCompletedJob.parameters.fileNamesTemplates.dataURI}`,
+ metadataURI: 
`${downloadUrl}/downloads/${mockCompletedJob.parameters.relativeDirectoryPath}/${mockCompletedJob.parameters.fileNamesTemplates.metadataURI}`, + }, + recordCatalogId: mockCompletedJob.internalId as string, + requestJobId: mockCompletedJob.id, + errorReason: undefined, + }; + + const expectedUpdateRequest = { + reason: undefined, + percentage: 100, + status: OperationStatus.COMPLETED, + expirationDate: expirationTime, + parameters: { + ...mockCompletedJob.parameters, + callbackParams: { ...expectedCallbackParamData, roi: mockCompletedJob.parameters.roi, status: OperationStatus.COMPLETED }, + }, + }; + const action = async () => tasksManager.finalizeExportJob(mockCompletedJob, expirationTime); + await expect(action()).resolves.not.toThrow(); + expect(createExportJsonMetadataMock).toHaveBeenCalledTimes(1); + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const createdCallbackParam: ICallbackDataExportBase = sendCallbacksSpy.mock.calls[0][1] as ICallbackDataExportBase; + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const updateRequest = updateJobMock.mock.calls[0][1] as IUpdateJobBody; + expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledTimes(1); + expect(createdCallbackParam).toStrictEqual(expectedCallbackParamData); + expect(updateRequest).toStrictEqual(expectedUpdateRequest); + }); + + it('should successfully finalize a job with status completed even if gpkg file size was failed', async () => { + const downloadUrl = configMock.get('downloadServerUrl'); + const getFileSizeSpy = jest.spyOn(utils, 'getFileSize'); + getFileSizeSpy.mockRejectedValue({ message: 'failed getting file size' }); + const expirationTime = new Date(); + createExportJsonMetadataMock.mockResolvedValue({}); + updateJobMock.mockResolvedValue({}); + sendCallbacksSpy = jest.spyOn(tasksManager, 'sendExportCallbacks'); + + const expectedCallbackParamData: ICallbackDataExportBase = { + expirationTime, + fileSize: 0, + links: { + dataURI: `${downloadUrl}/downloads/${mockCompletedJob.parameters.relativeDirectoryPath}/${mockCompletedJob.parameters.fileNamesTemplates.dataURI}`, + metadataURI: `${downloadUrl}/downloads/${mockCompletedJob.parameters.relativeDirectoryPath}/${mockCompletedJob.parameters.fileNamesTemplates.metadataURI}`, + }, + recordCatalogId: mockCompletedJob.internalId as string, + requestJobId: mockCompletedJob.id, + errorReason: undefined, + }; + + const expectedUpdateRequest = { + reason: undefined, + percentage: 100, + status: OperationStatus.COMPLETED, + expirationDate: expirationTime, + parameters: { + ...mockCompletedJob.parameters, + callbackParams: { ...expectedCallbackParamData, roi: mockCompletedJob.parameters.roi, status: OperationStatus.COMPLETED }, + }, + }; + const action = async () => tasksManager.finalizeExportJob(mockCompletedJob, expirationTime); + await expect(action()).resolves.not.toThrow(); + expect(createExportJsonMetadataMock).toHaveBeenCalledTimes(1); + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const createdCallbackParam: ICallbackDataExportBase = sendCallbacksSpy.mock.calls[0][1] as ICallbackDataExportBase; + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + const updateRequest = updateJobMock.mock.calls[0][1] as IUpdateJobBody; + expect(sendCallbacksSpy).toHaveBeenCalledTimes(1); + expect(updateJobMock).toHaveBeenCalledTimes(1); + expect(createdCallbackParam).toStrictEqual(expectedCallbackParamData); + 
expect(updateRequest).toStrictEqual(expectedUpdateRequest);
+ });
+
+ it('should successfully finalize a job with status failed due to an error while creating the json metadata file', async () => {
+ const expirationTime = new Date();
+ const getFileSizeSpy = jest.spyOn(utils, 'getFileSize');
+ getFileSizeSpy.mockResolvedValue(0);
+ createExportJsonMetadataMock.mockRejectedValue({ message: 'failed generate metadata.json' });
+ updateJobMock.mockResolvedValue({});
+ sendCallbacksSpy = jest.spyOn(tasksManager, 'sendExportCallbacks');
+
+ const expectedUpdateRequest = {
+ reason: JSON.stringify({ message: 'failed generate metadata.json' }),
+ percentage: 100,
+ status: OperationStatus.FAILED,
+ expirationDate: expirationTime,
+ };
+ const action = async () => tasksManager.finalizeExportJob(mockCompletedJob, expirationTime);
+ await expect(action()).resolves.not.toThrow();
+ expect(createExportJsonMetadataMock).toHaveBeenCalledTimes(1);
+ expect(sendCallbacksSpy).toHaveBeenCalledTimes(0);
+ expect(getFileSizeSpy).toHaveBeenCalledTimes(0);
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
+ const updateRequest = updateJobMock.mock.calls[0][1] as IUpdateJobBody;
+ expect(updateRequest).toStrictEqual(expectedUpdateRequest);
+ expect(updateJobMock).toHaveBeenCalledTimes(1);
+ });
+
+ it('should successfully finalize a job with status failed when processing the callbackParam fails', async () => {
+ const getFileSizeSpy = jest.spyOn(utils, 'getFileSize');
+ getFileSizeSpy.mockRejectedValue({ message: 'failed getting file size' });
+ const expirationTime = new Date();
+ createExportJsonMetadataMock.mockResolvedValue({});
+ updateJobMock.mockResolvedValue({});
+ sendCallbacksSpy = jest.spyOn(tasksManager, 'sendExportCallbacks');
+
+ const expectedCallbackParamData: ICallbackDataExportBase = {
+ expirationTime,
+ fileSize: 0,
+ links: mockCompletedJob.parameters.fileNamesTemplates,
+ recordCatalogId: mockCompletedJob.internalId as string,
+ requestJobId: mockCompletedJob.id,
+ errorReason: 'testError',
+ };
+
+ const expectedUpdateRequest = {
+ reason: 'testError',
+ percentage: undefined,
+ status: OperationStatus.FAILED,
+ expirationDate: expirationTime,
+ parameters: {
+ ...mockCompletedJob.parameters,
+ callbackParams: { ...expectedCallbackParamData, roi: mockCompletedJob.parameters.roi, status: OperationStatus.FAILED },
+ },
+ };
+ const action = async () => tasksManager.finalizeExportJob(mockCompletedJob, expirationTime, false, 'testError');
+ await expect(action()).resolves.not.toThrow();
+ expect(createExportJsonMetadataMock).toHaveBeenCalledTimes(0);
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
+ const createdCallbackParam: ICallbackDataExportBase = sendCallbacksSpy.mock.calls[0][1] as ICallbackDataExportBase;
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access
+ const updateRequest = updateJobMock.mock.calls[0][1] as IUpdateJobBody;
+ expect(sendCallbacksSpy).toHaveBeenCalledTimes(1);
+ expect(updateJobMock).toHaveBeenCalledTimes(1);
+ expect(createdCallbackParam).toStrictEqual(expectedCallbackParamData);
+ expect(updateRequest).toStrictEqual(expectedUpdateRequest);
+ });
});
});
});