From 05e4fc7afebc8cea7a9006370d30407603f1b489 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 01:17:41 +0100 Subject: [PATCH 01/12] Stash: creating and deleting tes datasets before and after tests WIP (Skipped tests temporary) --- test/environment/setup.ts | 93 --------- .../collections/CollectionsRepository.test.ts | 2 +- .../datasets/DatasetsRepository.test.ts | 189 +++++++++++++----- .../integration/files/FilesRepository.test.ts | 2 +- test/testHelpers/TestConstants.ts | 41 +++- .../collections/collectionHelper.ts | 15 ++ .../collections/test-collection-1.json | 2 +- test/testHelpers/datasets/datasetHelper.ts | 47 +++++ 8 files changed, 246 insertions(+), 145 deletions(-) diff --git a/test/environment/setup.ts b/test/environment/setup.ts index 446809b1..3126f8b1 100644 --- a/test/environment/setup.ts +++ b/test/environment/setup.ts @@ -2,14 +2,7 @@ import * as fs from 'fs' import { DockerComposeEnvironment, Wait } from 'testcontainers' import axios from 'axios' import { TestConstants } from '../testHelpers/TestConstants' -import datasetJson1 from '../testHelpers/datasets/test-dataset-1.json' -import datasetJson2 from '../testHelpers/datasets/test-dataset-2.json' -import datasetJson3 from '../testHelpers/datasets/test-dataset-3.json' -import datasetJson4 from '../testHelpers/datasets/test-dataset-4.json' -import collectionJson from '../testHelpers/collections/test-collection-1.json' -import { ROOT_COLLECTION_ALIAS } from '../../src/collections/domain/models/Collection' -const NUMBER_OF_DATASETS = 4 const COMPOSE_FILE = 'docker-compose.yml' const CONTAINER_DATAVERSE_BOOTSTRAP_NAME = 'test_dataverse_bootstrap' @@ -24,7 +17,6 @@ const API_KEY_USER_PASSWORD = 'admin1' export default async function setupTestEnvironment(): Promise { await setupContainers() await setupApiKey() - await setupTestFixtures() } async function setupContainers(): Promise { @@ -54,88 +46,3 @@ async function setupApiKey(): Promise { }) console.log('API key obtained') } - -async function setupTestFixtures(): Promise { - console.log('Creating test datasets...') - await createDatasetViaApi(datasetJson1) - .then() - .catch(() => { - console.error('Tests setup: Error while creating test Dataset 1') - }) - await createDatasetViaApi(datasetJson2).catch(() => { - console.error('Tests setup: Error while creating test Dataset 2') - }) - await createDatasetViaApi(datasetJson4).catch(() => { - console.error('Tests setup: Error while creating test Dataset 4') - }) - await createCollectionViaApi(collectionJson) - .then() - .catch(() => { - console.error('Tests setup: Error while creating test Collection 1') - }) - await createDatasetViaApi(datasetJson3, collectionJson.alias) - .then() - .catch(() => { - console.error('Tests setup: Error while creating test Dataset 3') - }) - console.log('Test datasets created') - await waitForDatasetsIndexingInSolr() -} - -/* eslint-disable @typescript-eslint/no-explicit-any */ -async function createCollectionViaApi(collectionJson: any): Promise { - return await axios.post( - `${TestConstants.TEST_API_URL}/dataverses/root`, - collectionJson, - buildRequestHeaders() - ) -} - -/* eslint-disable @typescript-eslint/no-explicit-any */ -async function createDatasetViaApi( - datasetJson: any, - collectionId = ROOT_COLLECTION_ALIAS -): Promise { - return await axios.post( - `${TestConstants.TEST_API_URL}/dataverses/${collectionId}/datasets`, - datasetJson, - buildRequestHeaders() - ) -} - -/* eslint-disable @typescript-eslint/no-explicit-any */ -async function waitForDatasetsIndexingInSolr(): 
Promise { - console.log('Waiting for datasets indexing in Solr...') - let datasetsIndexed = false - let retry = 0 - while (!datasetsIndexed && retry < 10) { - await axios - .get(`${TestConstants.TEST_API_URL}/search?q=*&type=dataset`, buildRequestHeaders()) - .then((response) => { - const nDatasets = response.data.data.items.length - if (nDatasets === NUMBER_OF_DATASETS) { - datasetsIndexed = true - } - }) - .catch((error) => { - console.error( - `Tests setup: Error while waiting for datasets indexing in Solr: [${ - error.response.status - }]${error.response.data ? ` ${error.response.data.message}` : ''}` - ) - }) - await new Promise((resolve) => setTimeout(resolve, 1000)) - retry++ - } - if (!datasetsIndexed) { - throw new Error('Tests setup: Timeout reached while waiting for datasets indexing in Solr') - } - console.log('Datasets indexed in Solr') -} - -/* eslint-disable @typescript-eslint/no-explicit-any */ -function buildRequestHeaders(): any { - return { - headers: { 'Content-Type': 'application/json', 'X-Dataverse-Key': process.env.TEST_API_KEY } - } -} diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index 31a14043..a8902d58 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -4,7 +4,7 @@ import { ReadError } from '../../../src' import { ApiConfig } from '../../../src' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' -describe('CollectionsRepository', () => { +describe.skip('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() beforeEach(async () => { diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index b8af8cd6..e736ba53 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -2,16 +2,21 @@ import { DatasetsRepository } from '../../../src/datasets/infra/repositories/Dat import { TestConstants } from '../../testHelpers/TestConstants' import { createPrivateUrlViaApi, - deaccessionDatasetViaApi, publishDatasetViaApi, - waitForNoLocks + waitForNoLocks, + deleteUnpublishedDatasetViaApi, + waitForDatasetsIndexedInSolr, + deletePublishedDatasetViaApi, + deaccessionDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' import { ReadError } from '../../../src/core/domain/repositories/ReadError' import { DatasetLockType, DatasetNotNumberedVersion, DatasetPreviewSubset, - VersionUpdateType + VersionUpdateType, + createDataset, + CreatedDatasetIdentifiers } from '../../../src/datasets' import { ApiConfig, WriteError } from '../../../src' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' @@ -22,14 +27,13 @@ import { DatasetDescription } from '../../../src/datasets/domain/models/Dataset' import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' +import { createCollectionViaApi } from '../../testHelpers/collections/collectionHelper' describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository() const nonExistentTestDatasetId = 100 - const latestVersionId = DatasetNotNumberedVersion.LATEST - - beforeEach(async () => { + beforeAll(async () => { ApiConfig.init( TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, @@ -37,7 +41,7 @@ describe('DatasetsRepository', () => { 
) }) - afterEach(async () => { + afterAll(async () => { ApiConfig.init( TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, @@ -48,45 +52,91 @@ describe('DatasetsRepository', () => { describe('getAllDatasetPreviews', () => { const testPageLimit = 1 const expectedTotalDatasetCount = 4 + let firstDatasetIds: CreatedDatasetIdentifiers + let secondDatasetIds: CreatedDatasetIdentifiers + let thirdDatasetIds: CreatedDatasetIdentifiers + let fourthDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + await createDatasets() + }) + + afterAll(async () => { + await deleteDatasets() + }) + + const createDatasets = async () => { + try { + firstDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + secondDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + thirdDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + + await createCollectionViaApi() + + fourthDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) + + await waitForDatasetsIndexedInSolr(expectedTotalDatasetCount) + } catch (error) { + throw Error('Error while creating test datasets') + } + } + + const deleteDatasets = async () => { + try { + await deleteUnpublishedDatasetViaApi(firstDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(secondDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) + } catch (error) { + throw Error('Error while deleting test datasets') + } + } test('should return all dataset previews when no pagination params are defined', async () => { const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews() expect(actual.datasetPreviews.length).toEqual(expectedTotalDatasetCount) - expect(actual.datasetPreviews[0].title).toMatch('Third Dataset') + expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return first dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 0) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].title).toMatch('Third Dataset') + expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return second dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 1) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].title).toMatch('Fourth Dataset') + expect(actual.datasetPreviews[0].persistentId).toMatch(thirdDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return third dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 2) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].title).toMatch('Second Dataset') + expect(actual.datasetPreviews[0].persistentId).toMatch(secondDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return fourth dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 3) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].title).toMatch('First Dataset') + 
expect(actual.datasetPreviews[0].persistentId).toMatch(firstDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return datasets in the specified collection', async () => { - const actual = await sut.getAllDatasetPreviews(testPageLimit, 0, 'firstCollection') - expect(actual.datasetPreviews[0].title).toMatch('Third Dataset') + const actual = await sut.getAllDatasetPreviews( + testPageLimit, + 0, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) + expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.totalDatasetCount).toEqual(1) }) @@ -101,47 +151,68 @@ describe('DatasetsRepository', () => { describe('getDataset', () => { describe('by numeric id', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw Error('Error while creating test dataset') + } + }) + + afterAll(async () => { + try { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw Error('Error while deleting test dataset') + } + }) + test('should return dataset when it exists filtering by id and version id', async () => { const actual = await sut.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestVersionId, + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, false ) - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID) + expect(actual.id).toBe(testDatasetIds.numericId) }) test('should return dataset when it is deaccessioned and includeDeaccessioned param is set', async () => { - await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) - - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_2_ID, 10) - - await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID, '1.0') + await publishDatasetViaApi(testDatasetIds.numericId) + await waitForNoLocks(testDatasetIds.numericId, 10) + await deaccessionDatasetViaApi(testDatasetIds.numericId, '1.0') const actual = await sut.getDataset( - TestConstants.TEST_CREATED_DATASET_2_ID, - latestVersionId, + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, true ) - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID) + expect(actual.id).toBe(testDatasetIds.numericId) }) test('should return dataset when it is deaccessioned, includeDeaccessioned param is set, and user is unauthenticated', async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, undefined) const actual = await sut.getDataset( - TestConstants.TEST_CREATED_DATASET_2_ID, - latestVersionId, + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, true ) - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID) + expect(actual.id).toBe(testDatasetIds.numericId) + ApiConfig.init( + TestConstants.TEST_API_URL, + DataverseApiAuthMechanism.API_KEY, + process.env.TEST_API_KEY + ) }) test('should return error when dataset is deaccessioned and includeDeaccessioned param is not set', async () => { const expectedError = new ReadError( - `[404] Dataset version ${latestVersionId} of dataset ${TestConstants.TEST_CREATED_DATASET_2_ID} not found` + `[404] Dataset version ${DatasetNotNumberedVersion.LATEST} of dataset ${testDatasetIds.numericId} not found` ) await expect( - sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, false) + 
sut.getDataset(testDatasetIds.numericId, DatasetNotNumberedVersion.LATEST, false) ).rejects.toThrow(expectedError) }) @@ -151,20 +222,42 @@ describe('DatasetsRepository', () => { ) await expect( - sut.getDataset(nonExistentTestDatasetId, latestVersionId, false) + sut.getDataset(nonExistentTestDatasetId, DatasetNotNumberedVersion.LATEST, false) ).rejects.toThrow(expectedError) }) }) describe('by persistent id', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw Error('Error while creating test dataset') + } + }) + + afterAll(async () => { + try { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw Error('Error while deleting test dataset') + } + }) + test('should return dataset when it exists filtering by persistent id and version id', async () => { const createdDataset = await sut.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestVersionId, + testDatasetIds.numericId, + DatasetNotNumberedVersion.LATEST, false ) - const actual = await sut.getDataset(createdDataset.persistentId, latestVersionId, false) - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID) + const actual = await sut.getDataset( + createdDataset.persistentId, + DatasetNotNumberedVersion.LATEST, + false + ) + expect(actual.id).toBe(testDatasetIds.numericId) }) test('should return error when dataset does not exist', async () => { @@ -172,14 +265,14 @@ describe('DatasetsRepository', () => { const expectedError = new ReadError( `[404] Dataset with Persistent ID ${testWrongPersistentId} not found.` ) - await expect(sut.getDataset(testWrongPersistentId, latestVersionId, false)).rejects.toThrow( - expectedError - ) + await expect( + sut.getDataset(testWrongPersistentId, DatasetNotNumberedVersion.LATEST, false) + ).rejects.toThrow(expectedError) }) }) }) - describe('Private URLs', () => { + describe.skip('Private URLs', () => { const expectedErrorInvalidToken = '[404] Private URL user not found' let privateUrlToken: string @@ -242,7 +335,7 @@ describe('DatasetsRepository', () => { }) }) - describe('getDatasetLocks', () => { + describe.skip('getDatasetLocks', () => { test('should return list of dataset locks by dataset id for a dataset while publishing', async () => { await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) .then() @@ -267,11 +360,11 @@ describe('DatasetsRepository', () => { }) }) - describe('getDatasetCitation', () => { + describe.skip('getDatasetCitation', () => { test('should return citation when dataset exists', async () => { const actualDatasetCitation = await sut.getDatasetCitation( TestConstants.TEST_CREATED_DATASET_1_ID, - latestVersionId, + DatasetNotNumberedVersion.LATEST, false ) expect(typeof actualDatasetCitation).toBe('string') @@ -283,21 +376,21 @@ describe('DatasetsRepository', () => { ) await expect( - sut.getDatasetCitation(nonExistentTestDatasetId, latestVersionId, false) + sut.getDatasetCitation(nonExistentTestDatasetId, DatasetNotNumberedVersion.LATEST, false) ).rejects.toThrow(expectedError) }) test('should return citation when dataset is deaccessioned', async () => { const actualDatasetCitation = await sut.getDatasetCitation( TestConstants.TEST_CREATED_DATASET_2_ID, - latestVersionId, + DatasetNotNumberedVersion.LATEST, true ) expect(typeof actualDatasetCitation).toBe('string') }) }) - describe('createDataset', () => { + describe.skip('createDataset', () => { 
test('should create a dataset with the provided dataset citation fields', async () => { const testNewDataset = { metadataBlockValues: [ @@ -343,7 +436,7 @@ describe('DatasetsRepository', () => { ) const actualCreatedDataset = await sut.getDataset( createdDataset.numericId, - latestVersionId, + DatasetNotNumberedVersion.LATEST, false ) @@ -380,7 +473,7 @@ describe('DatasetsRepository', () => { }) }) - describe('publishDataset', () => { + describe.skip('publishDataset', () => { test('should publish a new dataset version', async () => { const expectedMajorVersion = 1 await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_4_ID, 10) @@ -390,7 +483,7 @@ describe('DatasetsRepository', () => { const newDatasetVersion = await sut.getDataset( TestConstants.TEST_CREATED_DATASET_4_ID, - latestVersionId, + DatasetNotNumberedVersion.LATEST, false ) diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index ec755977..fbe8f271 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -22,7 +22,7 @@ import { waitForNoLocks } from '../../testHelpers/datasets/datasetHelper' -describe('FilesRepository', () => { +describe.skip('FilesRepository', () => { const sut: FilesRepository = new FilesRepository() const testTextFile1Name = 'test-file-1.txt' diff --git a/test/testHelpers/TestConstants.ts b/test/testHelpers/TestConstants.ts index 37d59b6e..d5876144 100644 --- a/test/testHelpers/TestConstants.ts +++ b/test/testHelpers/TestConstants.ts @@ -1,4 +1,5 @@ import { ROOT_COLLECTION_ALIAS } from '../../src/collections/domain/models/Collection' +import { NewDatasetDTO } from '../../src/datasets/domain/dtos/NewDatasetDTO' export class TestConstants { static readonly TEST_API_URL = 'http://localhost:8080/api/v1' @@ -52,6 +53,44 @@ export class TestConstants { static readonly TEST_DUMMY_COLLECTION_ID = 10001 static readonly TEST_DUMMY_COLLECTION_ALIAS = 'dummyCollectionId' static readonly TEST_CREATED_COLLECTION_1_ID = 5 - static readonly TEST_CREATED_COLLECTION_1_ALIAS = 'firstCollection' + static readonly TEST_CREATED_COLLECTION_1_ALIAS = 'testCollection' + static readonly TEST_CREATED_COLLECTION_ALIAS = 'testCollection' static readonly TEST_CREATED_COLLECTION_1_ROOT = ROOT_COLLECTION_ALIAS + static readonly TEST_NEW_DATASET_DTO: NewDatasetDTO = { + license: { + name: 'CC0 1.0', + uri: 'http://creativecommons.org/publicdomain/zero/1.0', + iconUri: 'https://licensebuttons.net/p/zero/1.0/88x31.png' + }, + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'Dataset created using the createDataset use case', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org' + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataversedemo.org' + } + ], + datasetContact: [ + { + datasetContactEmail: 'finch@mailinator.com', + datasetContactName: 'Finch, Fiona' + } + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of the dataset.' 
+ } + ], + subject: ['Medicine, Health and Life Sciences'] + } + } + ] + } } diff --git a/test/testHelpers/collections/collectionHelper.ts b/test/testHelpers/collections/collectionHelper.ts index d18c66b3..3123e712 100644 --- a/test/testHelpers/collections/collectionHelper.ts +++ b/test/testHelpers/collections/collectionHelper.ts @@ -1,6 +1,9 @@ import { Collection } from '../../../src/collections' import { DvObjectType } from '../../../src' import { CollectionPayload } from '../../../src/collections/infra/repositories/transformers/CollectionPayload' +import { TestConstants } from '../TestConstants' +import axios from 'axios' +import collectionJson from './test-collection-1.json' const COLLECTION_ID = 11111 const COLLECTION_ALIAS_STR = 'secondCollection' @@ -8,6 +11,10 @@ const COLLECTION_NAME_STR = 'Laboratory Research' const COLLECTION_AFFILIATION_STR = 'Laboratory Research Corporation' const COLLECTION_DESCRIPTION_STR = 'This is an example collection used for testing.' +const DATAVERSE_API_REQUEST_HEADERS = { + headers: { 'Content-Type': 'application/json', 'X-Dataverse-Key': process.env.TEST_API_KEY } +} + export const createCollectionModel = (): Collection => { const collectionModel: Collection = { id: COLLECTION_ID, @@ -31,3 +38,11 @@ export const createCollectionPayload = (): CollectionPayload => { } return collectionPayload } + +export async function createCollectionViaApi(): Promise { + return await axios.post( + `${TestConstants.TEST_API_URL}/dataverses/root`, + collectionJson, + DATAVERSE_API_REQUEST_HEADERS + ) +} diff --git a/test/testHelpers/collections/test-collection-1.json b/test/testHelpers/collections/test-collection-1.json index f23d819c..6cd57366 100644 --- a/test/testHelpers/collections/test-collection-1.json +++ b/test/testHelpers/collections/test-collection-1.json @@ -1,6 +1,6 @@ { "id": 4, - "alias": "firstCollection", + "alias": "testCollection", "name": "Scientific Research", "dataverseContacts": [ { diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index c276d4e5..b7f7c35d 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -202,6 +202,22 @@ export const createDatasetVersionPayload = ( return datasetPayload } +export const deleteUnpublishedDatasetViaApi = async (datasetId: number): Promise => { + return await axios.delete( + `${TestConstants.TEST_API_URL}/datasets/${datasetId}`, + DATAVERSE_API_REQUEST_HEADERS + ) +} + +export const deletePublishedDatasetViaApi = async ( + datasetPersistentId: string +): Promise => { + return await axios.delete( + `${TestConstants.TEST_API_URL}/datasets/:persistentId/destroy?persistentId=${datasetPersistentId}`, + DATAVERSE_API_REQUEST_HEADERS + ) +} + export const createDatasetLicenseModel = (withIconUri = true): DatasetLicense => { const datasetLicense: DatasetLicense = { name: 'CC0 1.0', @@ -271,3 +287,34 @@ export const waitForNoLocks = async ( throw new Error('Max retries reached.') } } + +export async function waitForDatasetsIndexedInSolr( + expectedNumberOfIndexedDatasets: number +): Promise { + console.log('Waiting for datasets indexing in Solr...') + let datasetsIndexed = false + let retry = 0 + while (!datasetsIndexed && retry < 10) { + await axios + .get(`${TestConstants.TEST_API_URL}/search?q=*&type=dataset`, DATAVERSE_API_REQUEST_HEADERS) + .then((response) => { + const nDatasets = response.data.data.items.length + if (nDatasets === expectedNumberOfIndexedDatasets) { + datasetsIndexed = true + } + }) + 
.catch((error) => { + console.error( + `Tests setup: Error while waiting for datasets indexing in Solr: [${ + error.response.status + }]${error.response.data ? ` ${error.response.data.message}` : ''}` + ) + }) + await new Promise((resolve) => setTimeout(resolve, 1000)) + retry++ + } + if (!datasetsIndexed) { + throw new Error('Tests setup: Timeout reached while waiting for datasets indexing in Solr') + } + console.log('Datasets indexed in Solr') +} From 5b595bd0ee01fc599ddb87ba6ea8209974af82e4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 10:45:33 +0100 Subject: [PATCH 02/12] Changed: temporarily skipped tests in DatasetRepository IT using create/destroy dataset mechanism --- .../datasets/DatasetsRepository.test.ts | 175 +++++++++++++----- 1 file changed, 131 insertions(+), 44 deletions(-) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index e736ba53..a249ed7b 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -80,7 +80,7 @@ describe('DatasetsRepository', () => { await waitForDatasetsIndexedInSolr(expectedTotalDatasetCount) } catch (error) { - throw Error('Error while creating test datasets') + throw new Error('Tests beforeAll(): Error while creating test datasets') } } @@ -91,7 +91,7 @@ describe('DatasetsRepository', () => { await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) } catch (error) { - throw Error('Error while deleting test datasets') + throw new Error('Tests afterAll():Error while deleting test datasets') } } @@ -157,7 +157,7 @@ describe('DatasetsRepository', () => { try { testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) } catch (error) { - throw Error('Error while creating test dataset') + throw new Error('Tests beforeAll(): Error while creating test dataset') } }) @@ -165,7 +165,7 @@ describe('DatasetsRepository', () => { try { await deletePublishedDatasetViaApi(testDatasetIds.persistentId) } catch (error) { - throw Error('Error while deleting test dataset') + throw new Error('Tests afterAll(): Error while deleting test dataset') } }) @@ -234,15 +234,15 @@ describe('DatasetsRepository', () => { try { testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) } catch (error) { - throw Error('Error while creating test dataset') + throw new Error('Tests beforeAll(): Error while creating test dataset') } }) afterAll(async () => { try { - await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) } catch (error) { - throw Error('Error while deleting test dataset') + throw new Error('Tests afterAll(): Error while deleting test dataset') } }) @@ -272,28 +272,41 @@ describe('DatasetsRepository', () => { }) }) - describe.skip('Private URLs', () => { + describe('Private URLs', () => { const expectedErrorInvalidToken = '[404] Private URL user not found' + let testDatasetIds: CreatedDatasetIdentifiers let privateUrlToken: string beforeAll(async () => { try { - const response = await createPrivateUrlViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } + try { + const response = await createPrivateUrlViaApi(testDatasetIds.numericId) privateUrlToken = 
response.data.data.token } catch (error) { throw new Error('Tests beforeAll(): Error while creating Dataset private URL') } }) + afterAll(async () => { + try { + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + describe('getPrivateUrlDataset', () => { test('should return dataset when token is valid', async () => { const actual = await sut.getPrivateUrlDataset(privateUrlToken) - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID) + expect(actual.id).toBe(testDatasetIds.numericId) }) test('should return error when token is not valid', async () => { const expectedError = new ReadError(expectedErrorInvalidToken) - await expect(sut.getPrivateUrlDataset('invalidToken')).rejects.toThrow(expectedError) }) }) @@ -306,49 +319,82 @@ describe('DatasetsRepository', () => { test('should return error when token is not valid', async () => { const expectedError = new ReadError(expectedErrorInvalidToken) - await expect(sut.getPrivateUrlDatasetCitation('invalidToken')).rejects.toThrow( expectedError ) }) }) + }) - describe('getDatasetUserPermissions', () => { - test('should return user permissions filtering by dataset id', async () => { - const actual = await sut.getDatasetUserPermissions(TestConstants.TEST_CREATED_DATASET_1_ID) - expect(actual.canViewUnpublishedDataset).toBe(true) - expect(actual.canEditDataset).toBe(true) - expect(actual.canPublishDataset).toBe(true) - expect(actual.canManageDatasetPermissions).toBe(true) - expect(actual.canDeleteDatasetDraft).toBe(true) - }) + describe('getDatasetUserPermissions', () => { + let testDatasetIds: CreatedDatasetIdentifiers - test('should return error when dataset does not exist', async () => { - const expectedError = new ReadError( - `[404] Dataset with ID ${nonExistentTestDatasetId} not found.` - ) + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } + }) - await expect(sut.getDatasetUserPermissions(nonExistentTestDatasetId)).rejects.toThrow( - expectedError - ) - }) + afterAll(async () => { + try { + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + + test('should return user permissions filtering by dataset id', async () => { + const actual = await sut.getDatasetUserPermissions(testDatasetIds.numericId) + expect(actual.canViewUnpublishedDataset).toBe(true) + expect(actual.canEditDataset).toBe(true) + expect(actual.canPublishDataset).toBe(true) + expect(actual.canManageDatasetPermissions).toBe(true) + expect(actual.canDeleteDatasetDraft).toBe(true) + }) + + test('should return error when dataset does not exist', async () => { + const expectedError = new ReadError( + `[404] Dataset with ID ${nonExistentTestDatasetId} not found.` + ) + + await expect(sut.getDatasetUserPermissions(nonExistentTestDatasetId)).rejects.toThrow( + expectedError + ) }) }) - describe.skip('getDatasetLocks', () => { + describe('getDatasetLocks', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } + }) + + afterAll(async () => { + try { + await 
deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + test('should return list of dataset locks by dataset id for a dataset while publishing', async () => { - await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) + await publishDatasetViaApi(testDatasetIds.numericId) .then() .catch((error) => { console.log(JSON.stringify(error)) }) - const actual = await sut.getDatasetLocks(TestConstants.TEST_CREATED_DATASET_2_ID) + const actual = await sut.getDatasetLocks(testDatasetIds.numericId) expect(actual.length).toBe(1) expect(actual[0].lockType).toBe(DatasetLockType.FINALIZE_PUBLICATION) expect(actual[0].userId).toBe('dataverseAdmin') - expect(actual[0].message).toBe( - 'Publishing the dataset; Registering PIDs for Datafiles; Validating Datafiles Asynchronously' - ) + expect(actual[0].message).toContain('Publishing the dataset') }) test('should return error when dataset does not exist', async () => { @@ -360,10 +406,28 @@ describe('DatasetsRepository', () => { }) }) - describe.skip('getDatasetCitation', () => { + describe('getDatasetCitation', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } + }) + + afterAll(async () => { + try { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + test('should return citation when dataset exists', async () => { const actualDatasetCitation = await sut.getDatasetCitation( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, DatasetNotNumberedVersion.LATEST, false ) @@ -381,16 +445,21 @@ describe('DatasetsRepository', () => { }) test('should return citation when dataset is deaccessioned', async () => { + await publishDatasetViaApi(testDatasetIds.numericId) + await waitForNoLocks(testDatasetIds.numericId, 10) + await deaccessionDatasetViaApi(testDatasetIds.numericId, '1.0') + const actualDatasetCitation = await sut.getDatasetCitation( - TestConstants.TEST_CREATED_DATASET_2_ID, + testDatasetIds.numericId, DatasetNotNumberedVersion.LATEST, true ) + expect(typeof actualDatasetCitation).toBe('string') }) }) - describe.skip('createDataset', () => { + describe('createDataset', () => { test('should create a dataset with the provided dataset citation fields', async () => { const testNewDataset = { metadataBlockValues: [ @@ -473,16 +542,34 @@ describe('DatasetsRepository', () => { }) }) - describe.skip('publishDataset', () => { + describe('publishDataset', () => { + let testDatasetIds: CreatedDatasetIdentifiers + + beforeAll(async () => { + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } + }) + + afterAll(async () => { + try { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + test('should publish a new dataset version', async () => { const expectedMajorVersion = 1 - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_4_ID, 10) + await waitForNoLocks(testDatasetIds.numericId, 10) - await sut.publishDataset(TestConstants.TEST_CREATED_DATASET_4_ID, 
VersionUpdateType.MAJOR) - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_4_ID, 10) + await sut.publishDataset(testDatasetIds.numericId, VersionUpdateType.MAJOR) + await waitForNoLocks(testDatasetIds.numericId, 10) const newDatasetVersion = await sut.getDataset( - TestConstants.TEST_CREATED_DATASET_4_ID, + testDatasetIds.numericId, DatasetNotNumberedVersion.LATEST, false ) From 518de5cde7bc90688d21f63ae1bc34a97b0e0519 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 12:55:27 +0100 Subject: [PATCH 03/12] Changed: temporarily skipped tests in FilesRepository and CollectionsRepository ITs using create/destroy dataset mechanism --- .../collections/CollectionsRepository.test.ts | 36 ++++-- .../datasets/DatasetsRepository.test.ts | 28 ++++- .../integration/files/FilesRepository.test.ts | 112 ++++++++---------- test/testHelpers/TestConstants.ts | 8 -- .../collections/collectionHelper.ts | 7 ++ test/testHelpers/datasets/test-dataset-1.json | 85 ------------- test/testHelpers/datasets/test-dataset-2.json | 85 ------------- test/testHelpers/datasets/test-dataset-3.json | 85 ------------- test/testHelpers/datasets/test-dataset-4.json | 85 ------------- 9 files changed, 107 insertions(+), 424 deletions(-) delete mode 100644 test/testHelpers/datasets/test-dataset-1.json delete mode 100644 test/testHelpers/datasets/test-dataset-2.json delete mode 100644 test/testHelpers/datasets/test-dataset-3.json delete mode 100644 test/testHelpers/datasets/test-dataset-4.json diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index a8902d58..6fcf9a6f 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -3,40 +3,55 @@ import { TestConstants } from '../../testHelpers/TestConstants' import { ReadError } from '../../../src' import { ApiConfig } from '../../../src' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' +import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' -describe.skip('CollectionsRepository', () => { +describe('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() - beforeEach(async () => { + beforeAll(async () => { ApiConfig.init( TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY ) + try { + await createCollectionViaApi() + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test collection') + } }) - afterEach(async () => { + afterAll(async () => { ApiConfig.init( TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY ) + try { + await deleteCollectionViaApi() + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test collection') + } }) describe('getCollection', () => { describe('by default `root` Id', () => { test('should return the root collection of the Dataverse installation if no parameter is passed AS `root`', async () => { const actual = await testGetCollection.getCollection() - expect(actual.alias).toBe(TestConstants.TEST_CREATED_COLLECTION_1_ROOT) + expect(actual.alias).toBe(ROOT_COLLECTION_ALIAS) }) }) describe('by string alias', () => { test('should return collection when it exists filtering by id AS (alias)', async () => { const actual = await 
testGetCollection.getCollection( - TestConstants.TEST_CREATED_COLLECTION_1_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS ) - expect(actual.alias).toBe(TestConstants.TEST_CREATED_COLLECTION_1_ALIAS) + expect(actual.alias).toBe(TestConstants.TEST_CREATED_COLLECTION_ALIAS) }) test('should return error when collection does not exist', async () => { @@ -50,11 +65,10 @@ describe.skip('CollectionsRepository', () => { }) }) describe('by numeric id', () => { - test('should return collection when it exists filtering by id AS (id)', async () => { - const actual = await testGetCollection.getCollection( - TestConstants.TEST_CREATED_COLLECTION_1_ID - ) - expect(actual.id).toBe(TestConstants.TEST_CREATED_COLLECTION_1_ID) + // FIXME + test.skip('should return collection when it exists filtering by id AS (id)', async () => { + const actual = await testGetCollection.getCollection(1) + expect(actual.id).toBe(1) }) test('should return error when collection does not exist', async () => { diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a249ed7b..0719d6ee 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -27,7 +27,10 @@ import { DatasetDescription } from '../../../src/datasets/domain/models/Dataset' import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' -import { createCollectionViaApi } from '../../testHelpers/collections/collectionHelper' +import { + createCollectionViaApi, + deleteCollectionViaApi +} from '../../testHelpers/collections/collectionHelper' describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository() @@ -58,21 +61,28 @@ describe('DatasetsRepository', () => { let fourthDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { + await createCollection() await createDatasets() }) afterAll(async () => { await deleteDatasets() + await deleteCollection() }) + const createCollection = async () => { + try { + await createCollectionViaApi() + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test collection') + } + } + const createDatasets = async () => { try { firstDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) secondDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) thirdDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - - await createCollectionViaApi() - fourthDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, TestConstants.TEST_CREATED_COLLECTION_ALIAS @@ -91,7 +101,15 @@ describe('DatasetsRepository', () => { await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) } catch (error) { - throw new Error('Tests afterAll():Error while deleting test datasets') + throw new Error('Tests afterAll(): Error while deleting test datasets') + } + } + + const deleteCollection = async () => { + try { + await deleteCollectionViaApi() + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test collection') } } diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index fbe8f271..c133b8d9 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -5,26 +5,33 @@ import { } from '../../../src/core/infra/repositories/ApiConfig' import { TestConstants 
} from '../../testHelpers/TestConstants' import { registerFileViaApi, uploadFileViaApi } from '../../testHelpers/files/filesHelper' -import { DatasetsRepository } from '../../../src/datasets/infra/repositories/DatasetsRepository' import { ReadError } from '../../../src/core/domain/repositories/ReadError' import { FileSearchCriteria, FileAccessStatus, FileOrderCriteria } from '../../../src/files/domain/models/FileCriteria' -import { DatasetNotNumberedVersion, Dataset } from '../../../src/datasets' +import { + DatasetNotNumberedVersion, + Dataset, + CreatedDatasetIdentifiers, + createDataset +} from '../../../src/datasets' import { File } from '../../../src/files/domain/models/File' import { FileCounts } from '../../../src/files/domain/models/FileCounts' import { FileDownloadSizeMode } from '../../../src' import { deaccessionDatasetViaApi, publishDatasetViaApi, - waitForNoLocks + waitForNoLocks, + deletePublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' -describe.skip('FilesRepository', () => { +describe('FilesRepository', () => { const sut: FilesRepository = new FilesRepository() + let testDatasetIds: CreatedDatasetIdentifiers + const testTextFile1Name = 'test-file-1.txt' const testTextFile2Name = 'test-file-2.txt' const testTextFile3Name = 'test-file-3.txt' @@ -35,43 +42,45 @@ describe.skip('FilesRepository', () => { const latestDatasetVersionId = DatasetNotNumberedVersion.LATEST - const datasetRepository = new DatasetsRepository() - let testFileId: number let testFilePersistentId: string + beforeAll(async () => { ApiConfig.init( TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY ) + try { + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + } catch (error) { + throw new Error('Tests beforeAll(): Error while creating test dataset') + } // Uploading test file 1 with some categories - const uploadFileResponse = await uploadFileViaApi( - TestConstants.TEST_CREATED_DATASET_1_ID, - testTextFile1Name, - { categories: [testCategoryName] } - ) + const uploadFileResponse = await uploadFileViaApi(testDatasetIds.numericId, testTextFile1Name, { + categories: [testCategoryName] + }) .then() .catch((e) => { console.log(e) throw new Error(`Tests beforeAll(): Error while uploading file ${testTextFile1Name}`) }) // Uploading test file 2 - await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile2Name) + await uploadFileViaApi(testDatasetIds.numericId, testTextFile2Name) .then() .catch((e) => { console.log(e) throw new Error(`Tests beforeAll(): Error while uploading file ${testTextFile2Name}`) }) // Uploading test file 3 - await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile3Name) + await uploadFileViaApi(testDatasetIds.numericId, testTextFile3Name) .then() .catch((e) => { console.log(e) throw new Error(`Tests beforeAll(): Error while uploading file ${testTextFile3Name}`) }) // Uploading test file 4 - await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTabFile4Name) + await uploadFileViaApi(testDatasetIds.numericId, testTabFile4Name) .then() .catch((e) => { console.log(e) @@ -80,7 +89,7 @@ describe.skip('FilesRepository', () => { // Registering test file 1 await registerFileViaApi(uploadFileResponse.data.data.files[0].dataFile.id) const filesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -89,6 +98,14 @@ describe.skip('FilesRepository', () => 
{ testFilePersistentId = filesSubset.files[0].persistentId }) + afterAll(async () => { + try { + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) + } catch (error) { + throw new Error('Tests afterAll(): Error while deleting test dataset') + } + }) + describe('getDatasetFiles', () => { const testFileCriteria = new FileSearchCriteria() .withContentType('text/plain') @@ -97,7 +114,7 @@ describe.skip('FilesRepository', () => { describe('by numeric id', () => { test('should return all files filtering by dataset id and version id', async () => { const actual = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -113,7 +130,7 @@ describe.skip('FilesRepository', () => { test('should return correct files filtering by dataset id, version id, and paginating', async () => { const actual = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ, @@ -129,7 +146,7 @@ describe.skip('FilesRepository', () => { test('should return correct files filtering by dataset id, version id, and applying newest file criteria', async () => { const actual = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NEWEST, @@ -164,13 +181,8 @@ describe.skip('FilesRepository', () => { describe('by persistent id', () => { test('should return all files filtering by persistent id and version id', async () => { - const testDataset = await datasetRepository.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false - ) const actual = await sut.getDatasetFiles( - testDataset.persistentId, + testDatasetIds.persistentId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -185,13 +197,8 @@ describe.skip('FilesRepository', () => { }) test('should return correct files filtering by persistent id, version id, and paginating', async () => { - const testDataset = await datasetRepository.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false - ) const actual = await sut.getDatasetFiles( - testDataset.persistentId, + testDatasetIds.persistentId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ, @@ -206,13 +213,8 @@ describe.skip('FilesRepository', () => { }) test('should return correct files filtering by persistent id, version id, and applying newest file criteria', async () => { - const testDataset = await datasetRepository.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false - ) const actual = await sut.getDatasetFiles( - testDataset.persistentId, + testDatasetIds.persistentId, latestDatasetVersionId, false, FileOrderCriteria.NEWEST, @@ -275,7 +277,7 @@ describe.skip('FilesRepository', () => { test('should return file count filtering by numeric id', async () => { const actual = await sut.getDatasetFileCounts( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false ) @@ -312,7 +314,7 @@ describe.skip('FilesRepository', () => { } const testCriteria = new FileSearchCriteria().withCategoryName(testCategoryName) const actual = await sut.getDatasetFileCounts( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, testCriteria @@ -325,13 +327,8 @@ describe.skip('FilesRepository', () => { }) test('should return file count filtering by 
persistent id', async () => { - const testDataset = await datasetRepository.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false - ) const actual = await sut.getDatasetFileCounts( - testDataset.persistentId, + testDatasetIds.persistentId, latestDatasetVersionId, false ) @@ -350,7 +347,7 @@ describe.skip('FilesRepository', () => { test('should return total download size filtering by numeric id and ignoring original tabular size', async () => { const actual = await sut.getDatasetFilesTotalDownloadSize( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileDownloadSizeMode.ORIGINAL @@ -359,13 +356,8 @@ describe.skip('FilesRepository', () => { }) test('should return total download size filtering by persistent id and ignoring original tabular size', async () => { - const testDataset = await datasetRepository.getDataset( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false - ) const actual = await sut.getDatasetFilesTotalDownloadSize( - testDataset.persistentId, + testDatasetIds.persistentId, latestDatasetVersionId, false, FileDownloadSizeMode.ORIGINAL @@ -377,7 +369,7 @@ describe.skip('FilesRepository', () => { const expectedTotalDownloadSizeForCriteria = 12 // 12 bytes const testCriteria = new FileSearchCriteria().withCategoryName(testCategoryName) const actual = await sut.getDatasetFilesTotalDownloadSize( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileDownloadSizeMode.ORIGINAL, @@ -390,7 +382,7 @@ describe.skip('FilesRepository', () => { describe('getFileDownloadCount', () => { test('should return count filtering by file id and version id', async () => { const currentTestFilesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -410,7 +402,7 @@ describe.skip('FilesRepository', () => { describe('getFileUserPermissions', () => { test('should return user permissions filtering by file id and version id', async () => { const currentTestFilesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -433,7 +425,7 @@ describe.skip('FilesRepository', () => { describe('getFileDataTables', () => { test('should return data tables filtering by tabular file id and version id', async () => { const currentTestFilesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -445,7 +437,7 @@ describe.skip('FilesRepository', () => { test('should return error when file is not tabular and version id', async () => { const currentTestFilesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, + testDatasetIds.numericId, latestDatasetVersionId, false, FileOrderCriteria.NAME_AZ @@ -495,7 +487,7 @@ describe.skip('FilesRepository', () => { ] expect(actual[0].name).toBe(testTextFile1Name) - expect(actual[1].id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID) + expect(actual[1].id).toBe(testDatasetIds.numericId) }) test('should return error when file does not exist', async () => { @@ -552,19 +544,19 @@ describe.skip('FilesRepository', () => { }) test('should return citation when dataset is deaccessioned', async () => { - await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) + await 
publishDatasetViaApi(testDatasetIds.numericId) .then() .catch(() => { throw new Error('Error while publishing test Dataset') }) - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_1_ID, 10) + await waitForNoLocks(testDatasetIds.numericId, 10) .then() .catch(() => { throw new Error('Error while waiting for no locks') }) - await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, '1.0') + await deaccessionDatasetViaApi(testDatasetIds.numericId, '1.0') .then() .catch(() => { throw new Error('Error while deaccessioning test Dataset') diff --git a/test/testHelpers/TestConstants.ts b/test/testHelpers/TestConstants.ts index d5876144..d54fa0fa 100644 --- a/test/testHelpers/TestConstants.ts +++ b/test/testHelpers/TestConstants.ts @@ -1,4 +1,3 @@ -import { ROOT_COLLECTION_ALIAS } from '../../src/collections/domain/models/Collection' import { NewDatasetDTO } from '../../src/datasets/domain/dtos/NewDatasetDTO' export class TestConstants { @@ -46,16 +45,9 @@ export class TestConstants { 'Content-Type': 'application/json' } } - static readonly TEST_CREATED_DATASET_1_ID = 2 - static readonly TEST_CREATED_DATASET_2_ID = 3 - static readonly TEST_CREATED_DATASET_3_ID = 5 - static readonly TEST_CREATED_DATASET_4_ID = 4 static readonly TEST_DUMMY_COLLECTION_ID = 10001 static readonly TEST_DUMMY_COLLECTION_ALIAS = 'dummyCollectionId' - static readonly TEST_CREATED_COLLECTION_1_ID = 5 - static readonly TEST_CREATED_COLLECTION_1_ALIAS = 'testCollection' static readonly TEST_CREATED_COLLECTION_ALIAS = 'testCollection' - static readonly TEST_CREATED_COLLECTION_1_ROOT = ROOT_COLLECTION_ALIAS static readonly TEST_NEW_DATASET_DTO: NewDatasetDTO = { license: { name: 'CC0 1.0', diff --git a/test/testHelpers/collections/collectionHelper.ts b/test/testHelpers/collections/collectionHelper.ts index 3123e712..160b28a7 100644 --- a/test/testHelpers/collections/collectionHelper.ts +++ b/test/testHelpers/collections/collectionHelper.ts @@ -46,3 +46,10 @@ export async function createCollectionViaApi(): Promise { DATAVERSE_API_REQUEST_HEADERS ) } + +export async function deleteCollectionViaApi(): Promise { + return await axios.delete( + `${TestConstants.TEST_API_URL}/dataverses/${TestConstants.TEST_CREATED_COLLECTION_ALIAS}`, + DATAVERSE_API_REQUEST_HEADERS + ) +} diff --git a/test/testHelpers/datasets/test-dataset-1.json b/test/testHelpers/datasets/test-dataset-1.json deleted file mode 100644 index 10c93200..00000000 --- a/test/testHelpers/datasets/test-dataset-1.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "datasetVersion": { - "license": { - "name": "CC0 1.0", - "uri": "http://creativecommons.org/publicdomain/zero/1.0", - "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png" - }, - "metadataBlocks": { - "citation": { - "fields": [ - { - "value": "First Dataset", - "typeClass": "primitive", - "multiple": false, - "typeName": "title" - }, - { - "value": [ - { - "authorName": { - "value": "Finch, Fiona", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorName" - }, - "authorAffiliation": { - "value": "Birds Inc.", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorAffiliation" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "author" - }, - { - "value": [ - { - "datasetContactEmail": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactEmail", - "value": "finch@mailinator.com" - }, - "datasetContactName": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactName", - "value": "Finch, Fiona" - 
} - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "datasetContact" - }, - { - "value": [ - { - "dsDescriptionValue": { - "value": "This is the description of the first dataset.", - "multiple": false, - "typeClass": "primitive", - "typeName": "dsDescriptionValue" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "dsDescription" - }, - { - "value": ["Medicine, Health and Life Sciences"], - "typeClass": "controlledVocabulary", - "multiple": true, - "typeName": "subject" - } - ], - "displayName": "Citation Metadata" - } - } - } -} diff --git a/test/testHelpers/datasets/test-dataset-2.json b/test/testHelpers/datasets/test-dataset-2.json deleted file mode 100644 index 7cfce6bb..00000000 --- a/test/testHelpers/datasets/test-dataset-2.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "datasetVersion": { - "license": { - "name": "CC0 1.0", - "uri": "http://creativecommons.org/publicdomain/zero/1.0", - "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png" - }, - "metadataBlocks": { - "citation": { - "fields": [ - { - "value": "Second Dataset", - "typeClass": "primitive", - "multiple": false, - "typeName": "title" - }, - { - "value": [ - { - "authorName": { - "value": "Finch, Fiona", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorName" - }, - "authorAffiliation": { - "value": "Birds Inc.", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorAffiliation" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "author" - }, - { - "value": [ - { - "datasetContactEmail": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactEmail", - "value": "finch@mailinator.com" - }, - "datasetContactName": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactName", - "value": "Finch, Fiona" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "datasetContact" - }, - { - "value": [ - { - "dsDescriptionValue": { - "value": "This is the description of the second dataset.", - "multiple": false, - "typeClass": "primitive", - "typeName": "dsDescriptionValue" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "dsDescription" - }, - { - "value": ["Medicine, Health and Life Sciences"], - "typeClass": "controlledVocabulary", - "multiple": true, - "typeName": "subject" - } - ], - "displayName": "Citation Metadata" - } - } - } -} diff --git a/test/testHelpers/datasets/test-dataset-3.json b/test/testHelpers/datasets/test-dataset-3.json deleted file mode 100644 index 4f867c90..00000000 --- a/test/testHelpers/datasets/test-dataset-3.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "datasetVersion": { - "license": { - "name": "CC0 1.0", - "uri": "http://creativecommons.org/publicdomain/zero/1.0", - "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png" - }, - "metadataBlocks": { - "citation": { - "fields": [ - { - "value": "Third Dataset", - "typeClass": "primitive", - "multiple": false, - "typeName": "title" - }, - { - "value": [ - { - "authorName": { - "value": "Finch, Fiona", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorName" - }, - "authorAffiliation": { - "value": "Birds Inc.", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorAffiliation" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "author" - }, - { - "value": [ - { - "datasetContactEmail": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactEmail", - "value": 
"finch@mailinator.com" - }, - "datasetContactName": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactName", - "value": "Finch, Fiona" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "datasetContact" - }, - { - "value": [ - { - "dsDescriptionValue": { - "value": "This is the description of the third dataset.", - "multiple": false, - "typeClass": "primitive", - "typeName": "dsDescriptionValue" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "dsDescription" - }, - { - "value": ["Medicine, Health and Life Sciences"], - "typeClass": "controlledVocabulary", - "multiple": true, - "typeName": "subject" - } - ], - "displayName": "Citation Metadata" - } - } - } -} diff --git a/test/testHelpers/datasets/test-dataset-4.json b/test/testHelpers/datasets/test-dataset-4.json deleted file mode 100644 index 9ae58df5..00000000 --- a/test/testHelpers/datasets/test-dataset-4.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "datasetVersion": { - "license": { - "name": "CC0 1.0", - "uri": "http://creativecommons.org/publicdomain/zero/1.0", - "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png" - }, - "metadataBlocks": { - "citation": { - "fields": [ - { - "value": "Fourth Dataset", - "typeClass": "primitive", - "multiple": false, - "typeName": "title" - }, - { - "value": [ - { - "authorName": { - "value": "Finch, Fiona", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorName" - }, - "authorAffiliation": { - "value": "Birds Inc.", - "typeClass": "primitive", - "multiple": false, - "typeName": "authorAffiliation" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "author" - }, - { - "value": [ - { - "datasetContactEmail": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactEmail", - "value": "finch@mailinator.com" - }, - "datasetContactName": { - "typeClass": "primitive", - "multiple": false, - "typeName": "datasetContactName", - "value": "Finch, Fiona" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "datasetContact" - }, - { - "value": [ - { - "dsDescriptionValue": { - "value": "This is the description of the third dataset.", - "multiple": false, - "typeClass": "primitive", - "typeName": "dsDescriptionValue" - } - } - ], - "typeClass": "compound", - "multiple": true, - "typeName": "dsDescription" - }, - { - "value": ["Medicine, Health and Life Sciences"], - "typeClass": "controlledVocabulary", - "multiple": true, - "typeName": "subject" - } - ], - "displayName": "Citation Metadata" - } - } - } -} From 7ecc577283b6b70381c3575700a3f934adca90e7 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 15:11:50 +0100 Subject: [PATCH 04/12] Fixed: CollectionsRepository unit test --- test/unit/collections/CollectionsRepository.test.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/unit/collections/CollectionsRepository.test.ts b/test/unit/collections/CollectionsRepository.test.ts index eab8e8a0..3df31635 100644 --- a/test/unit/collections/CollectionsRepository.test.ts +++ b/test/unit/collections/CollectionsRepository.test.ts @@ -10,6 +10,7 @@ import { } from '../../testHelpers/collections/collectionHelper' import { TestConstants } from '../../testHelpers/TestConstants' import { ReadError } from '../../../src' +import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' describe('CollectionsRepository', () => { const sut: CollectionsRepository = new CollectionsRepository() @@ -85,7 
+86,7 @@ describe('CollectionsRepository', () => { describe('by default root id', () => { test('should return a Collection when no collection id, using ROOT instead is successful', async () => { jest.spyOn(axios, 'get').mockResolvedValue(testCollectionSuccessfulResponse) - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${TestConstants.TEST_CREATED_COLLECTION_1_ROOT}` + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${ROOT_COLLECTION_ALIAS}` // API Key auth const actual = await sut.getCollection() @@ -96,7 +97,7 @@ describe('CollectionsRepository', () => { test('should return error on repository read error', async () => { jest.spyOn(axios, 'get').mockRejectedValue(TestConstants.TEST_ERROR_RESPONSE) - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${TestConstants.TEST_CREATED_COLLECTION_1_ROOT}` + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${ROOT_COLLECTION_ALIAS}` let error = undefined as unknown as ReadError From 586c52ba6ded0eeee3b87929c8abb8ce6ffaee7c Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 15:16:48 +0100 Subject: [PATCH 05/12] Changed: temporarily disabled CollectionsRepository IT --- test/integration/collections/CollectionsRepository.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index 6fcf9a6f..3fba936e 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -9,7 +9,7 @@ import { } from '../../testHelpers/collections/collectionHelper' import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' -describe('CollectionsRepository', () => { +describe.skip('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() beforeAll(async () => { From 5307e3885c3b78517a900c15aa2c8b8726f1012b Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 15:53:19 +0100 Subject: [PATCH 06/12] Changed: getAllDatasetPreviews using subcollection to avoid conflicts with other running ITs --- .../datasets/DatasetsRepository.test.ts | 52 +++++++++++++------ 1 file changed, 35 insertions(+), 17 deletions(-) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 0719d6ee..23c15279 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -80,9 +80,18 @@ describe('DatasetsRepository', () => { const createDatasets = async () => { try { - firstDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - secondDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - thirdDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + firstDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) + secondDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) + thirdDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) fourthDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, TestConstants.TEST_CREATED_COLLECTION_ALIAS @@ -114,49 +123,58 @@ 
describe('DatasetsRepository', () => { } test('should return all dataset previews when no pagination params are defined', async () => { - const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews() + const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews( + undefined, + undefined, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) expect(actual.datasetPreviews.length).toEqual(expectedTotalDatasetCount) expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return first dataset preview page', async () => { - const actual = await sut.getAllDatasetPreviews(testPageLimit, 0) + const actual = await sut.getAllDatasetPreviews( + testPageLimit, + 0, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return second dataset preview page', async () => { - const actual = await sut.getAllDatasetPreviews(testPageLimit, 1) + const actual = await sut.getAllDatasetPreviews( + testPageLimit, + 1, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(thirdDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return third dataset preview page', async () => { - const actual = await sut.getAllDatasetPreviews(testPageLimit, 2) + const actual = await sut.getAllDatasetPreviews( + testPageLimit, + 2, + TestConstants.TEST_CREATED_COLLECTION_ALIAS + ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(secondDatasetIds.persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) test('should return fourth dataset preview page', async () => { - const actual = await sut.getAllDatasetPreviews(testPageLimit, 3) - expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].persistentId).toMatch(firstDatasetIds.persistentId) - expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) - }) - - test('should return datasets in the specified collection', async () => { const actual = await sut.getAllDatasetPreviews( testPageLimit, - 0, + 3, TestConstants.TEST_CREATED_COLLECTION_ALIAS ) - expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.totalDatasetCount).toEqual(1) + expect(actual.datasetPreviews[0].persistentId).toMatch(firstDatasetIds.persistentId) + expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) }) From 1dfb1fa00c25ff4c787f29abd11394cf41d8317a Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 9 Apr 2024 16:54:48 +0100 Subject: [PATCH 07/12] Fixed: using different collections to avoid conflicts in ITs --- .../collections/CollectionsRepository.test.ts | 10 +++---- .../datasets/DatasetsRepository.test.ts | 27 ++++++++++--------- test/testHelpers/TestConstants.ts | 3 ++- .../collections/collectionHelper.ts | 13 +++++---- .../collections/test-collection-1.json | 4 +-- .../collections/test-collection-2.json | 16 +++++++++++ test/testHelpers/datasets/datasetHelper.ts | 10 ++++--- 7 files changed, 54 insertions(+), 29 deletions(-) create mode 100644 test/testHelpers/collections/test-collection-2.json 
diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index 3fba936e..474b7da9 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -9,7 +9,7 @@ import { } from '../../testHelpers/collections/collectionHelper' import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' -describe.skip('CollectionsRepository', () => { +describe('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() beforeAll(async () => { @@ -19,7 +19,7 @@ describe.skip('CollectionsRepository', () => { process.env.TEST_API_KEY ) try { - await createCollectionViaApi() + await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) } catch (error) { throw new Error('Tests beforeAll(): Error while creating test collection') } @@ -32,7 +32,7 @@ describe.skip('CollectionsRepository', () => { process.env.TEST_API_KEY ) try { - await deleteCollectionViaApi() + await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) } catch (error) { throw new Error('Tests afterAll(): Error while deleting test collection') } @@ -49,9 +49,9 @@ describe.skip('CollectionsRepository', () => { describe('by string alias', () => { test('should return collection when it exists filtering by id AS (alias)', async () => { const actual = await testGetCollection.getCollection( - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_2 ) - expect(actual.alias).toBe(TestConstants.TEST_CREATED_COLLECTION_ALIAS) + expect(actual.alias).toBe(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) }) test('should return error when collection does not exist', async () => { diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 23c15279..4edef23f 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -72,7 +72,7 @@ describe('DatasetsRepository', () => { const createCollection = async () => { try { - await createCollectionViaApi() + await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) } catch (error) { throw new Error('Tests beforeAll(): Error while creating test collection') } @@ -82,22 +82,25 @@ describe('DatasetsRepository', () => { try { firstDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) secondDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) thirdDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) fourthDatasetIds = await createDataset.execute( TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) - await waitForDatasetsIndexedInSolr(expectedTotalDatasetCount) + await waitForDatasetsIndexedInSolr( + expectedTotalDatasetCount, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) } catch (error) { throw new Error('Tests beforeAll(): Error while creating test datasets') } @@ -116,7 +119,7 @@ describe('DatasetsRepository', () => { const deleteCollection = 
async () => { try { - await deleteCollectionViaApi() + await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) } catch (error) { throw new Error('Tests afterAll(): Error while deleting test collection') } @@ -126,7 +129,7 @@ describe('DatasetsRepository', () => { const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews( undefined, undefined, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(expectedTotalDatasetCount) expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) @@ -137,7 +140,7 @@ describe('DatasetsRepository', () => { const actual = await sut.getAllDatasetPreviews( testPageLimit, 0, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) @@ -148,7 +151,7 @@ describe('DatasetsRepository', () => { const actual = await sut.getAllDatasetPreviews( testPageLimit, 1, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(thirdDatasetIds.persistentId) @@ -159,7 +162,7 @@ describe('DatasetsRepository', () => { const actual = await sut.getAllDatasetPreviews( testPageLimit, 2, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(secondDatasetIds.persistentId) @@ -170,7 +173,7 @@ describe('DatasetsRepository', () => { const actual = await sut.getAllDatasetPreviews( testPageLimit, 3, - TestConstants.TEST_CREATED_COLLECTION_ALIAS + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) expect(actual.datasetPreviews[0].persistentId).toMatch(firstDatasetIds.persistentId) diff --git a/test/testHelpers/TestConstants.ts b/test/testHelpers/TestConstants.ts index d54fa0fa..602443b5 100644 --- a/test/testHelpers/TestConstants.ts +++ b/test/testHelpers/TestConstants.ts @@ -47,7 +47,8 @@ export class TestConstants { } static readonly TEST_DUMMY_COLLECTION_ID = 10001 static readonly TEST_DUMMY_COLLECTION_ALIAS = 'dummyCollectionId' - static readonly TEST_CREATED_COLLECTION_ALIAS = 'testCollection' + static readonly TEST_CREATED_COLLECTION_ALIAS_1 = 'testCollection1' + static readonly TEST_CREATED_COLLECTION_ALIAS_2 = 'testCollection2' static readonly TEST_NEW_DATASET_DTO: NewDatasetDTO = { license: { name: 'CC0 1.0', diff --git a/test/testHelpers/collections/collectionHelper.ts b/test/testHelpers/collections/collectionHelper.ts index 160b28a7..44b8c6c7 100644 --- a/test/testHelpers/collections/collectionHelper.ts +++ b/test/testHelpers/collections/collectionHelper.ts @@ -3,7 +3,8 @@ import { DvObjectType } from '../../../src' import { CollectionPayload } from '../../../src/collections/infra/repositories/transformers/CollectionPayload' import { TestConstants } from '../TestConstants' import axios from 'axios' -import collectionJson from './test-collection-1.json' +import collectionJson1 from './test-collection-1.json' +import collectionJson2 from './test-collection-2.json' const COLLECTION_ID = 11111 const COLLECTION_ALIAS_STR = 'secondCollection' @@ -39,17 +40,19 @@ export const createCollectionPayload = (): CollectionPayload => { return 
collectionPayload } -export async function createCollectionViaApi(): Promise { +export async function createCollectionViaApi(collectionAlias: string): Promise { return await axios.post( `${TestConstants.TEST_API_URL}/dataverses/root`, - collectionJson, + collectionAlias == TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ? collectionJson1 + : collectionJson2, DATAVERSE_API_REQUEST_HEADERS ) } -export async function deleteCollectionViaApi(): Promise { +export async function deleteCollectionViaApi(collectionAlias: string): Promise { return await axios.delete( - `${TestConstants.TEST_API_URL}/dataverses/${TestConstants.TEST_CREATED_COLLECTION_ALIAS}`, + `${TestConstants.TEST_API_URL}/dataverses/${collectionAlias}`, DATAVERSE_API_REQUEST_HEADERS ) } diff --git a/test/testHelpers/collections/test-collection-1.json b/test/testHelpers/collections/test-collection-1.json index 6cd57366..fc6881b4 100644 --- a/test/testHelpers/collections/test-collection-1.json +++ b/test/testHelpers/collections/test-collection-1.json @@ -1,6 +1,6 @@ { - "id": 4, - "alias": "testCollection", + "id": 100, + "alias": "testCollection1", "name": "Scientific Research", "dataverseContacts": [ { diff --git a/test/testHelpers/collections/test-collection-2.json b/test/testHelpers/collections/test-collection-2.json new file mode 100644 index 00000000..a0e2d816 --- /dev/null +++ b/test/testHelpers/collections/test-collection-2.json @@ -0,0 +1,16 @@ +{ + "id": 101, + "alias": "testCollection2", + "name": "Scientific Research", + "dataverseContacts": [ + { + "contactEmail": "pi@example.edu" + }, + { + "contactEmail": "student@example.edu" + } + ], + "affiliation": "Scientific Research University", + "description": "We do all the science.", + "dataverseType": "LABORATORY" +} diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index b7f7c35d..80019bb3 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -289,14 +289,17 @@ export const waitForNoLocks = async ( } export async function waitForDatasetsIndexedInSolr( - expectedNumberOfIndexedDatasets: number + expectedNumberOfIndexedDatasets: number, + collectionAlias: string ): Promise { - console.log('Waiting for datasets indexing in Solr...') let datasetsIndexed = false let retry = 0 while (!datasetsIndexed && retry < 10) { await axios - .get(`${TestConstants.TEST_API_URL}/search?q=*&type=dataset`, DATAVERSE_API_REQUEST_HEADERS) + .get( + `${TestConstants.TEST_API_URL}/search?q=*&type=dataset&subtree=${collectionAlias}`, + DATAVERSE_API_REQUEST_HEADERS + ) .then((response) => { const nDatasets = response.data.data.items.length if (nDatasets === expectedNumberOfIndexedDatasets) { @@ -316,5 +319,4 @@ export async function waitForDatasetsIndexedInSolr( if (!datasetsIndexed) { throw new Error('Tests setup: Timeout reached while waiting for datasets indexing in Solr') } - console.log('Datasets indexed in Solr') } From 815f20fcaa5cb301733ac80b7c7e3c6af51dbebe Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 10 Apr 2024 10:50:46 +0100 Subject: [PATCH 08/12] Changed: enabled skipped test in CollectionsRepository --- .../collections/CollectionsRepository.test.ts | 13 ++++++++----- test/testHelpers/collections/test-collection-1.json | 1 - test/testHelpers/collections/test-collection-2.json | 1 - 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index 
474b7da9..9c4bc1bc 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -11,6 +11,7 @@ import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Co describe('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() + let testCollectionId: number beforeAll(async () => { ApiConfig.init( @@ -19,7 +20,10 @@ describe('CollectionsRepository', () => { process.env.TEST_API_KEY ) try { - await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) + await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2).then( + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (response: any) => (testCollectionId = response.data.data.id) + ) } catch (error) { throw new Error('Tests beforeAll(): Error while creating test collection') } @@ -65,10 +69,9 @@ describe('CollectionsRepository', () => { }) }) describe('by numeric id', () => { - // FIXME - test.skip('should return collection when it exists filtering by id AS (id)', async () => { - const actual = await testGetCollection.getCollection(1) - expect(actual.id).toBe(1) + test('should return collection when it exists filtering by id AS (id)', async () => { + const actual = await testGetCollection.getCollection(testCollectionId) + expect(actual.id).toBe(testCollectionId) }) test('should return error when collection does not exist', async () => { diff --git a/test/testHelpers/collections/test-collection-1.json b/test/testHelpers/collections/test-collection-1.json index fc6881b4..e3449bf7 100644 --- a/test/testHelpers/collections/test-collection-1.json +++ b/test/testHelpers/collections/test-collection-1.json @@ -1,5 +1,4 @@ { - "id": 100, "alias": "testCollection1", "name": "Scientific Research", "dataverseContacts": [ diff --git a/test/testHelpers/collections/test-collection-2.json b/test/testHelpers/collections/test-collection-2.json index a0e2d816..e0cfaf60 100644 --- a/test/testHelpers/collections/test-collection-2.json +++ b/test/testHelpers/collections/test-collection-2.json @@ -1,5 +1,4 @@ { - "id": 101, "alias": "testCollection2", "name": "Scientific Research", "dataverseContacts": [ From e431bc0a8713ab2eff7e295b90b490a8ba39d280 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 10 Apr 2024 11:01:53 +0100 Subject: [PATCH 09/12] Added: destroying created datasets on functional tests --- test/functional/datasets/CreateDataset.test.ts | 2 ++ test/functional/datasets/PublishDataset.test.ts | 15 +++++++++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/test/functional/datasets/CreateDataset.test.ts b/test/functional/datasets/CreateDataset.test.ts index 3ef15611..d1bf729f 100644 --- a/test/functional/datasets/CreateDataset.test.ts +++ b/test/functional/datasets/CreateDataset.test.ts @@ -3,6 +3,7 @@ import { ApiConfig } from '../../../src' import { TestConstants } from '../../testHelpers/TestConstants' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError' +import { deleteUnpublishedDatasetViaApi } from '../../testHelpers/datasets/datasetHelper' describe('execute', () => { beforeEach(async () => { @@ -56,6 +57,7 @@ describe('execute', () => { expect(createdDatasetIdentifiers).not.toBeNull() expect(createdDatasetIdentifiers.numericId).not.toBeNull() 
expect(createdDatasetIdentifiers.persistentId).not.toBeNull() + await deleteUnpublishedDatasetViaApi(createdDatasetIdentifiers.numericId) } }) diff --git a/test/functional/datasets/PublishDataset.test.ts b/test/functional/datasets/PublishDataset.test.ts index 54225df5..eb3955be 100644 --- a/test/functional/datasets/PublishDataset.test.ts +++ b/test/functional/datasets/PublishDataset.test.ts @@ -7,7 +7,10 @@ import { } from '../../../src' import { TestConstants } from '../../testHelpers/TestConstants' import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig' -import { waitForNoLocks } from '../../testHelpers/datasets/datasetHelper' +import { + waitForNoLocks, + deletePublishedDatasetViaApi +} from '../../testHelpers/datasets/datasetHelper' const testNewDataset = { license: { @@ -57,12 +60,16 @@ describe('execute', () => { }) test('should successfully publish a dataset', async () => { - const dataset = await createDataset.execute(testNewDataset) + const createdDatasetIdentifiers = await createDataset.execute(testNewDataset) - const response = await publishDataset.execute(dataset.persistentId, VersionUpdateType.MAJOR) - await waitForNoLocks(dataset.numericId, 10) + const response = await publishDataset.execute( + createdDatasetIdentifiers.persistentId, + VersionUpdateType.MAJOR + ) + await waitForNoLocks(createdDatasetIdentifiers.numericId, 10) expect(response).toBeUndefined() + await deletePublishedDatasetViaApi(createdDatasetIdentifiers.persistentId) }) test('should throw an error when trying to publish a dataset that does not exist', async () => { From 8e3c0be1125712af4705a58856fe800d13d7959f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Apr 2024 16:15:03 +0100 Subject: [PATCH 10/12] Refactor: try catch blocks moved to helper test classes --- .../collections/CollectionsRepository.test.ts | 18 +- .../datasets/DatasetsRepository.test.ts | 164 +++++------------- .../collections/collectionHelper.ts | 34 ++-- test/testHelpers/datasets/datasetHelper.ts | 76 +++++--- 4 files changed, 117 insertions(+), 175 deletions(-) diff --git a/test/integration/collections/CollectionsRepository.test.ts b/test/integration/collections/CollectionsRepository.test.ts index 9c4bc1bc..ef92cd96 100644 --- a/test/integration/collections/CollectionsRepository.test.ts +++ b/test/integration/collections/CollectionsRepository.test.ts @@ -8,6 +8,7 @@ import { deleteCollectionViaApi } from '../../testHelpers/collections/collectionHelper' import { ROOT_COLLECTION_ALIAS } from '../../../src/collections/domain/models/Collection' +import { CollectionPayload } from '../../../src/collections/infra/repositories/transformers/CollectionPayload' describe('CollectionsRepository', () => { const testGetCollection: CollectionsRepository = new CollectionsRepository() @@ -19,14 +20,9 @@ describe('CollectionsRepository', () => { DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY ) - try { - await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2).then( - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (response: any) => (testCollectionId = response.data.data.id) - ) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test collection') - } + await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2).then( + (collectionPayload: CollectionPayload) => (testCollectionId = collectionPayload.id) + ) }) afterAll(async () => { @@ -35,11 +31,7 @@ describe('CollectionsRepository', () => { DataverseApiAuthMechanism.API_KEY, 
process.env.TEST_API_KEY ) - try { - await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test collection') - } + await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_2) }) describe('getCollection', () => { diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 4edef23f..a5dd4d7f 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -71,58 +71,42 @@ describe('DatasetsRepository', () => { }) const createCollection = async () => { - try { - await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test collection') - } + await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) } const createDatasets = async () => { - try { - firstDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - secondDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - thirdDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - fourthDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) + firstDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) + secondDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) + thirdDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) + fourthDatasetIds = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) - await waitForDatasetsIndexedInSolr( - expectedTotalDatasetCount, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test datasets') - } + await waitForDatasetsIndexedInSolr( + expectedTotalDatasetCount, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) } const deleteDatasets = async () => { - try { - await deleteUnpublishedDatasetViaApi(firstDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(secondDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test datasets') - } + await deleteUnpublishedDatasetViaApi(firstDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(secondDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) + await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) } const deleteCollection = async () => { - try { - await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test collection') - } + await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1) } test('should return all dataset previews when no pagination params are defined', async () => { @@ -193,19 
+177,11 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deletePublishedDatasetViaApi(testDatasetIds.persistentId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) }) test('should return dataset when it exists filtering by id and version id', async () => { @@ -270,19 +246,11 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) }) test('should return dataset when it exists filtering by persistent id and version id', async () => { @@ -317,25 +285,13 @@ describe('DatasetsRepository', () => { let privateUrlToken: string beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } - try { - const response = await createPrivateUrlViaApi(testDatasetIds.numericId) - privateUrlToken = response.data.data.token - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating Dataset private URL') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) + const response = await createPrivateUrlViaApi(testDatasetIds.numericId) + privateUrlToken = response.data.data.token }) afterAll(async () => { - try { - await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) }) describe('getPrivateUrlDataset', () => { @@ -369,19 +325,11 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deleteUnpublishedDatasetViaApi(testDatasetIds.numericId) }) test('should return user permissions filtering by dataset id', async () => { @@ -408,27 +356,15 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new 
Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deletePublishedDatasetViaApi(testDatasetIds.persistentId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) }) test('should return list of dataset locks by dataset id for a dataset while publishing', async () => { await publishDatasetViaApi(testDatasetIds.numericId) - .then() - .catch((error) => { - console.log(JSON.stringify(error)) - }) const actual = await sut.getDatasetLocks(testDatasetIds.numericId) expect(actual.length).toBe(1) expect(actual[0].lockType).toBe(DatasetLockType.FINALIZE_PUBLICATION) @@ -449,19 +385,11 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deletePublishedDatasetViaApi(testDatasetIds.persistentId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) }) test('should return citation when dataset exists', async () => { @@ -585,19 +513,11 @@ describe('DatasetsRepository', () => { let testDatasetIds: CreatedDatasetIdentifiers beforeAll(async () => { - try { - testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) - } catch (error) { - throw new Error('Tests beforeAll(): Error while creating test dataset') - } + testDatasetIds = await createDataset.execute(TestConstants.TEST_NEW_DATASET_DTO) }) afterAll(async () => { - try { - await deletePublishedDatasetViaApi(testDatasetIds.persistentId) - } catch (error) { - throw new Error('Tests afterAll(): Error while deleting test dataset') - } + await deletePublishedDatasetViaApi(testDatasetIds.persistentId) }) test('should publish a new dataset version', async () => { diff --git a/test/testHelpers/collections/collectionHelper.ts b/test/testHelpers/collections/collectionHelper.ts index 44b8c6c7..803c3d4f 100644 --- a/test/testHelpers/collections/collectionHelper.ts +++ b/test/testHelpers/collections/collectionHelper.ts @@ -40,19 +40,29 @@ export const createCollectionPayload = (): CollectionPayload => { return collectionPayload } -export async function createCollectionViaApi(collectionAlias: string): Promise { - return await axios.post( - `${TestConstants.TEST_API_URL}/dataverses/root`, - collectionAlias == TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ? collectionJson1 - : collectionJson2, - DATAVERSE_API_REQUEST_HEADERS - ) +export async function createCollectionViaApi(collectionAlias: string): Promise { + try { + return await axios + .post( + `${TestConstants.TEST_API_URL}/dataverses/root`, + collectionAlias == TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ? 
collectionJson1 + : collectionJson2, + DATAVERSE_API_REQUEST_HEADERS + ) + .then((response) => response.data.data) + } catch (error) { + throw new Error(`Error while creating test collection ${collectionAlias}`) + } } export async function deleteCollectionViaApi(collectionAlias: string): Promise { - return await axios.delete( - `${TestConstants.TEST_API_URL}/dataverses/${collectionAlias}`, - DATAVERSE_API_REQUEST_HEADERS - ) + try { + return await axios.delete( + `${TestConstants.TEST_API_URL}/dataverses/${collectionAlias}`, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while deleting test collection ${collectionAlias}`) + } } diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index 80019bb3..14aab6e7 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -203,19 +203,27 @@ export const createDatasetVersionPayload = ( } export const deleteUnpublishedDatasetViaApi = async (datasetId: number): Promise => { - return await axios.delete( - `${TestConstants.TEST_API_URL}/datasets/${datasetId}`, - DATAVERSE_API_REQUEST_HEADERS - ) + try { + return await axios.delete( + `${TestConstants.TEST_API_URL}/datasets/${datasetId}`, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while deleting unpublished test dataset ${datasetId}`) + } } export const deletePublishedDatasetViaApi = async ( datasetPersistentId: string ): Promise => { - return await axios.delete( - `${TestConstants.TEST_API_URL}/datasets/:persistentId/destroy?persistentId=${datasetPersistentId}`, - DATAVERSE_API_REQUEST_HEADERS - ) + try { + return await axios.delete( + `${TestConstants.TEST_API_URL}/datasets/:persistentId/destroy?persistentId=${datasetPersistentId}`, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while deleting published test dataset ${datasetPersistentId}`) + } } export const createDatasetLicenseModel = (withIconUri = true): DatasetLicense => { @@ -230,31 +238,43 @@ export const createDatasetLicenseModel = (withIconUri = true): DatasetLicense => } export const publishDatasetViaApi = async (datasetId: number): Promise => { - return await axios.post( - `${TestConstants.TEST_API_URL}/datasets/${datasetId}/actions/:publish?type=major`, - {}, - DATAVERSE_API_REQUEST_HEADERS - ) + try { + return await axios.post( + `${TestConstants.TEST_API_URL}/datasets/${datasetId}/actions/:publish?type=major`, + {}, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while publishing test dataset ${datasetId}`) + } } export const deaccessionDatasetViaApi = async ( datasetId: number, versionId: string ): Promise => { - const data = { deaccessionReason: 'Test reason.' } - return await axios.post( - `${TestConstants.TEST_API_URL}/datasets/${datasetId}/versions/${versionId}/deaccession`, - JSON.stringify(data), - DATAVERSE_API_REQUEST_HEADERS - ) + try { + const data = { deaccessionReason: 'Test reason.' 
} + return await axios.post( + `${TestConstants.TEST_API_URL}/datasets/${datasetId}/versions/${versionId}/deaccession`, + JSON.stringify(data), + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while deaccessioning test dataset ${datasetId}`) + } } export const createPrivateUrlViaApi = async (datasetId: number): Promise => { - return await axios.post( - `${TestConstants.TEST_API_URL}/datasets/${datasetId}/privateUrl`, - {}, - DATAVERSE_API_REQUEST_HEADERS - ) + try { + return await axios.post( + `${TestConstants.TEST_API_URL}/datasets/${datasetId}/privateUrl`, + {}, + DATAVERSE_API_REQUEST_HEADERS + ) + } catch (error) { + throw new Error(`Error while creating private URL for dataset ${datasetId}`) + } } export const waitForNoLocks = async ( @@ -308,15 +328,15 @@ export async function waitForDatasetsIndexedInSolr( }) .catch((error) => { console.error( - `Tests setup: Error while waiting for datasets indexing in Solr: [${ - error.response.status - }]${error.response.data ? ` ${error.response.data.message}` : ''}` + `Error while waiting for datasets indexing in Solr: [${error.response.status}]${ + error.response.data ? ` ${error.response.data.message}` : '' + }` ) }) await new Promise((resolve) => setTimeout(resolve, 1000)) retry++ } if (!datasetsIndexed) { - throw new Error('Tests setup: Timeout reached while waiting for datasets indexing in Solr') + throw new Error('Timeout reached while waiting for datasets indexing in Solr') } } From 347eab95de1ebce2976b129442a4288bab92d032 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Apr 2024 16:37:37 +0100 Subject: [PATCH 11/12] Refactor: using array in DatasetsRepository IT to setup test datasets --- .../datasets/DatasetsRepository.test.ts | 44 +++++++------------ 1 file changed, 15 insertions(+), 29 deletions(-) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a5dd4d7f..8c985afd 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -55,10 +55,7 @@ describe('DatasetsRepository', () => { describe('getAllDatasetPreviews', () => { const testPageLimit = 1 const expectedTotalDatasetCount = 4 - let firstDatasetIds: CreatedDatasetIdentifiers - let secondDatasetIds: CreatedDatasetIdentifiers - let thirdDatasetIds: CreatedDatasetIdentifiers - let fourthDatasetIds: CreatedDatasetIdentifiers + let createdDatasetIds: CreatedDatasetIdentifiers[] = [] beforeAll(async () => { await createCollection() @@ -75,22 +72,12 @@ describe('DatasetsRepository', () => { } const createDatasets = async () => { - firstDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - secondDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - thirdDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) - fourthDatasetIds = await createDataset.execute( - TestConstants.TEST_NEW_DATASET_DTO, - TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 - ) + for (let i = 0; i < expectedTotalDatasetCount; i++) { + createdDatasetIds[i] = await createDataset.execute( + TestConstants.TEST_NEW_DATASET_DTO, + TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 + ) + } await waitForDatasetsIndexedInSolr( expectedTotalDatasetCount, @@ -99,10 +86,9 @@ describe('DatasetsRepository', () => { } const 
deleteDatasets = async () => { - await deleteUnpublishedDatasetViaApi(firstDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(secondDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(thirdDatasetIds.numericId) - await deleteUnpublishedDatasetViaApi(fourthDatasetIds.numericId) + for (let i = 0; i < expectedTotalDatasetCount; i++) { + await deleteUnpublishedDatasetViaApi(createdDatasetIds[i].numericId) + } } const deleteCollection = async () => { @@ -116,7 +102,7 @@ describe('DatasetsRepository', () => { TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(expectedTotalDatasetCount) - expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) + expect(actual.datasetPreviews[0].persistentId).toMatch(createdDatasetIds[3].persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) @@ -127,7 +113,7 @@ describe('DatasetsRepository', () => { TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].persistentId).toMatch(fourthDatasetIds.persistentId) + expect(actual.datasetPreviews[0].persistentId).toMatch(createdDatasetIds[3].persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) @@ -138,7 +124,7 @@ describe('DatasetsRepository', () => { TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].persistentId).toMatch(thirdDatasetIds.persistentId) + expect(actual.datasetPreviews[0].persistentId).toMatch(createdDatasetIds[2].persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) @@ -149,7 +135,7 @@ describe('DatasetsRepository', () => { TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].persistentId).toMatch(secondDatasetIds.persistentId) + expect(actual.datasetPreviews[0].persistentId).toMatch(createdDatasetIds[1].persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) @@ -160,7 +146,7 @@ describe('DatasetsRepository', () => { TestConstants.TEST_CREATED_COLLECTION_ALIAS_1 ) expect(actual.datasetPreviews.length).toEqual(1) - expect(actual.datasetPreviews[0].persistentId).toMatch(firstDatasetIds.persistentId) + expect(actual.datasetPreviews[0].persistentId).toMatch(createdDatasetIds[0].persistentId) expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount) }) }) From f9b41831997df6cf332f073da13bf17a2d754111 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Apr 2024 16:39:39 +0100 Subject: [PATCH 12/12] Fixed: eslint let->const --- test/integration/datasets/DatasetsRepository.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 8c985afd..5ecbbf76 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -55,7 +55,7 @@ describe('DatasetsRepository', () => { describe('getAllDatasetPreviews', () => { const testPageLimit = 1 const expectedTotalDatasetCount = 4 - let createdDatasetIds: CreatedDatasetIdentifiers[] = [] + const createdDatasetIds: CreatedDatasetIdentifiers[] = [] beforeAll(async () => { await createCollection()
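
For reference, a condensed sketch of the integration-test setup/teardown pattern this series converges on. It is not itself one of the patches: the helper names, signatures and constants are taken from the diffs above, while the file location and import paths are assumptions based on the existing integration tests.

    import { DatasetsRepository } from '../../../src/datasets/infra/repositories/DatasetsRepository'
    import { ApiConfig } from '../../../src'
    import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'
    import { createDataset, CreatedDatasetIdentifiers } from '../../../src/datasets'
    import { TestConstants } from '../../testHelpers/TestConstants'
    import {
      createCollectionViaApi,
      deleteCollectionViaApi
    } from '../../testHelpers/collections/collectionHelper'
    import {
      deleteUnpublishedDatasetViaApi,
      waitForDatasetsIndexedInSolr
    } from '../../testHelpers/datasets/datasetHelper'

    describe('getAllDatasetPreviews (setup/teardown pattern sketch)', () => {
      const sut = new DatasetsRepository()
      const expectedTotalDatasetCount = 4
      const createdDatasetIds: CreatedDatasetIdentifiers[] = []

      beforeAll(async () => {
        // Configure the client once per suite, as the integration tests above do.
        ApiConfig.init(
          TestConstants.TEST_API_URL,
          DataverseApiAuthMechanism.API_KEY,
          process.env.TEST_API_KEY
        )
        // Each suite works inside its own collection so concurrently running ITs do not conflict.
        await createCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1)
        // Create the test datasets in that collection and remember their identifiers for teardown.
        for (let i = 0; i < expectedTotalDatasetCount; i++) {
          createdDatasetIds[i] = await createDataset.execute(
            TestConstants.TEST_NEW_DATASET_DTO,
            TestConstants.TEST_CREATED_COLLECTION_ALIAS_1
          )
        }
        // Block until Solr has indexed the datasets created for this collection.
        await waitForDatasetsIndexedInSolr(
          expectedTotalDatasetCount,
          TestConstants.TEST_CREATED_COLLECTION_ALIAS_1
        )
      })

      afterAll(async () => {
        // Remove everything the suite created so other suites start from a known state.
        for (const ids of createdDatasetIds) {
          await deleteUnpublishedDatasetViaApi(ids.numericId)
        }
        await deleteCollectionViaApi(TestConstants.TEST_CREATED_COLLECTION_ALIAS_1)
      })

      test('returns only the datasets created in the test collection', async () => {
        const actual = await sut.getAllDatasetPreviews(
          undefined,
          undefined,
          TestConstants.TEST_CREATED_COLLECTION_ALIAS_1
        )
        expect(actual.datasetPreviews.length).toEqual(expectedTotalDatasetCount)
        expect(actual.totalDatasetCount).toEqual(expectedTotalDatasetCount)
      })
    })

Scoping each suite to its own collection (TEST_CREATED_COLLECTION_ALIAS_1 here, TEST_CREATED_COLLECTION_ALIAS_2 for the collections suite) is what allows the integration tests to run side by side without one suite's Solr-indexed datasets leaking into another suite's pagination assertions.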