From 1b3d5b52e64055ba4a52281e765390c8ffbabc6b Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 3 Jan 2024 13:53:00 +0000 Subject: [PATCH 01/96] Changed: includeFiles query param to excludeFiles in getDataset repository API call --- src/datasets/infra/repositories/DatasetsRepository.ts | 2 +- test/unit/datasets/DatasetsRepository.test.ts | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index 541f1698..50211ff6 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -43,7 +43,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi true, { includeDeaccessioned: includeDeaccessioned, - includeFiles: false, + excludeFiles: true, }, ) .then((response) => transformVersionResponseToDataset(response)) diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index 261df777..05a9bbe9 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -87,11 +87,11 @@ describe('DatasetsRepository', () => { describe('getDataset', () => { const testIncludeDeaccessioned = false; const expectedRequestConfigApiKey = { - params: { includeDeaccessioned: testIncludeDeaccessioned, includeFiles: false }, + params: { includeDeaccessioned: testIncludeDeaccessioned, excludeFiles: true }, headers: TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY.headers, }; const expectedRequestConfigSessionCookie = { - params: { includeDeaccessioned: testIncludeDeaccessioned, includeFiles: false }, + params: { includeDeaccessioned: testIncludeDeaccessioned, excludeFiles: true }, withCredentials: TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE.withCredentials, headers: 
TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE.headers, }; From 086b981e69ba97e5872f89ddfe487de285eab4a2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 3 Jan 2024 15:09:44 +0000 Subject: [PATCH 02/96] Added: getDataset includeDeaccesioned test cases --- .../datasets/DatasetsRepository.test.ts | 76 ++++++++++++----- test/integration/environment/setup.js | 8 +- test/testHelpers/TestConstants.ts | 1 + test/testHelpers/datasets/test-dataset-3.json | 85 +++++++++++++++++++ 4 files changed, 146 insertions(+), 24 deletions(-) create mode 100644 test/testHelpers/datasets/test-dataset-3.json diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a3064195..4cc4991b 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -25,32 +25,40 @@ describe('DatasetsRepository', () => { describe('getAllDatasetPreviews', () => { const testPageLimit = 1; + const expectedTotalDatasetCount = 3; test('should return all dataset previews when no pagination params are defined', async () => { const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews(); - assert.match(actual.datasetPreviews.length, 2); - assert.match(actual.datasetPreviews[0].title, 'Second Dataset'); - assert.match(actual.totalDatasetCount, 2); + assert.match(actual.datasetPreviews.length, expectedTotalDatasetCount); + assert.match(actual.datasetPreviews[0].title, 'Third Dataset'); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return first dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 0); assert.match(actual.datasetPreviews.length, 1); - assert.match(actual.datasetPreviews[0].title, 'Second Dataset'); - assert.match(actual.totalDatasetCount, 2); + assert.match(actual.datasetPreviews[0].title, 'Third Dataset'); + assert.match(actual.totalDatasetCount, 
expectedTotalDatasetCount); }); test('should return second dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 1); assert.match(actual.datasetPreviews.length, 1); - assert.match(actual.datasetPreviews[0].title, 'First Dataset'); - assert.match(actual.totalDatasetCount, 2); + assert.match(actual.datasetPreviews[0].title, 'Second Dataset'); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return third dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 2); + assert.match(actual.datasetPreviews.length, 1); + assert.match(actual.datasetPreviews[0].title, 'First Dataset'); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); + }); + + test('should return fourth dataset preview page', async () => { + const actual = await sut.getAllDatasetPreviews(testPageLimit, 3); assert.match(actual.datasetPreviews.length, 0); - assert.match(actual.totalDatasetCount, 2); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); }); @@ -74,6 +82,41 @@ describe('DatasetsRepository', () => { expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID); }); + test('should return dataset when it is deaccessioned and includeDeaccessioned param is set', async () => { + await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); + + await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_2_ID, 10) + .then() + .catch(() => { + assert.fail('Error while waiting for no locks'); + }); + + await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID, '1.0') + .then() + .catch((error) => { + console.log(JSON.stringify(error)); + assert.fail('Error while deaccessioning test Dataset'); + }); + + const actual = await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, true); + + 
expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID); + }); + + test('should return error when dataset is deaccessioned and includeDeaccessioned param is not set', async () => { + let error: ReadError = undefined; + await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, false).catch((e) => (error = e)); + + assert.match( + error.message, + `There was an error when reading the resource. Reason was: [404] Dataset version ${latestVersionId} of dataset ${TestConstants.TEST_CREATED_DATASET_2_ID} not found`, + ); + }); + test('should return error when dataset does not exist', async () => { let error: ReadError = undefined; await sut.getDataset(nonExistentTestDatasetId, latestVersionId, false).catch((e) => (error = e)); @@ -176,12 +219,12 @@ describe('DatasetsRepository', () => { describe('getDatasetLocks', () => { test('should return list of dataset locks by dataset id for a dataset while publishing', async () => { - await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) + await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_3_ID) .then() .catch(() => { assert.fail('Error while publishing test Dataset'); }); - const actual = await sut.getDatasetLocks(TestConstants.TEST_CREATED_DATASET_2_ID); + const actual = await sut.getDatasetLocks(TestConstants.TEST_CREATED_DATASET_3_ID); assert.match(actual.length, 1); assert.match(actual[0].lockType, DatasetLockType.FINALIZE_PUBLICATION); assert.match(actual[0].userId, 'dataverseAdmin'); @@ -222,19 +265,6 @@ describe('DatasetsRepository', () => { }); test('should return citation when dataset is deaccessioned', async () => { - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_2_ID, 10) - .then() - .catch(() => { - assert.fail('Error while waiting for no locks'); - }); - - await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID, '1.0') - .then() - .catch((error) => { - console.log(JSON.stringify(error)); - assert.fail('Error while deaccessioning test 
Dataset'); - }); - const actualDatasetCitation = await sut.getDatasetCitation( TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, diff --git a/test/integration/environment/setup.js b/test/integration/environment/setup.js index 57cf5c06..49ec492f 100644 --- a/test/integration/environment/setup.js +++ b/test/integration/environment/setup.js @@ -4,6 +4,7 @@ const axios = require('axios'); const { TestConstants } = require('../../testHelpers/TestConstants'); const datasetJson1 = require('../../testHelpers/datasets/test-dataset-1.json'); const datasetJson2 = require('../../testHelpers/datasets/test-dataset-2.json'); +const datasetJson3 = require('../../testHelpers/datasets/test-dataset-3.json'); const COMPOSE_FILE = 'docker-compose.yml'; @@ -59,6 +60,11 @@ async function setupTestFixtures() { .catch((error) => { console.error('Tests setup: Error while creating test Dataset 2'); }); + await createDatasetViaApi(datasetJson3) + .then() + .catch((error) => { + console.error('Tests setup: Error while creating test Dataset 3'); + }); console.log('Test datasets created'); await waitForDatasetsIndexingInSolr(); } @@ -76,7 +82,7 @@ async function waitForDatasetsIndexingInSolr() { .get(`${TestConstants.TEST_API_URL}/search?q=*&type=dataset`, buildRequestHeaders()) .then((response) => { const nDatasets = response.data.data.items.length; - if (nDatasets == 2) { + if (nDatasets == 3) { datasetsIndexed = true; } }) diff --git a/test/testHelpers/TestConstants.ts b/test/testHelpers/TestConstants.ts index 5ef4dc4e..c637ff3b 100644 --- a/test/testHelpers/TestConstants.ts +++ b/test/testHelpers/TestConstants.ts @@ -44,4 +44,5 @@ export class TestConstants { }; static readonly TEST_CREATED_DATASET_1_ID = 2; static readonly TEST_CREATED_DATASET_2_ID = 3; + static readonly TEST_CREATED_DATASET_3_ID = 4; } diff --git a/test/testHelpers/datasets/test-dataset-3.json b/test/testHelpers/datasets/test-dataset-3.json new file mode 100644 index 00000000..4f867c90 --- /dev/null +++ 
b/test/testHelpers/datasets/test-dataset-3.json @@ -0,0 +1,85 @@ +{ + "datasetVersion": { + "license": { + "name": "CC0 1.0", + "uri": "http://creativecommons.org/publicdomain/zero/1.0", + "iconUri": "https://licensebuttons.net/p/zero/1.0/88x31.png" + }, + "metadataBlocks": { + "citation": { + "fields": [ + { + "value": "Third Dataset", + "typeClass": "primitive", + "multiple": false, + "typeName": "title" + }, + { + "value": [ + { + "authorName": { + "value": "Finch, Fiona", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorName" + }, + "authorAffiliation": { + "value": "Birds Inc.", + "typeClass": "primitive", + "multiple": false, + "typeName": "authorAffiliation" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "author" + }, + { + "value": [ + { + "datasetContactEmail": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactEmail", + "value": "finch@mailinator.com" + }, + "datasetContactName": { + "typeClass": "primitive", + "multiple": false, + "typeName": "datasetContactName", + "value": "Finch, Fiona" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "datasetContact" + }, + { + "value": [ + { + "dsDescriptionValue": { + "value": "This is the description of the third dataset.", + "multiple": false, + "typeClass": "primitive", + "typeName": "dsDescriptionValue" + } + } + ], + "typeClass": "compound", + "multiple": true, + "typeName": "dsDescription" + }, + { + "value": ["Medicine, Health and Life Sciences"], + "typeClass": "controlledVocabulary", + "multiple": true, + "typeName": "subject" + } + ], + "displayName": "Citation Metadata" + } + } + } +} From 46fe6d32a2591bfe4ff0882c23c06a839608b8a4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 4 Jan 2024 13:20:00 +0000 Subject: [PATCH 03/96] Added: test case for unauthenticated getDataset call for deaccessioned --- src/core/infra/repositories/ApiRepository.ts | 4 +++- 
test/integration/datasets/DatasetsRepository.test.ts | 12 +++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/core/infra/repositories/ApiRepository.ts b/src/core/infra/repositories/ApiRepository.ts index a58fe658..f0eafbb9 100644 --- a/src/core/infra/repositories/ApiRepository.ts +++ b/src/core/infra/repositories/ApiRepository.ts @@ -56,7 +56,9 @@ export abstract class ApiRepository { requestConfig.withCredentials = true; break; case DataverseApiAuthMechanism.API_KEY: - requestConfig.headers['X-Dataverse-Key'] = ApiConfig.dataverseApiKey; + if (typeof ApiConfig.dataverseApiKey !== 'undefined') { + requestConfig.headers['X-Dataverse-Key'] = ApiConfig.dataverseApiKey; + } break; } return requestConfig; diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 4cc4991b..3ce4fe82 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -19,7 +19,11 @@ describe('DatasetsRepository', () => { const latestVersionId = DatasetNotNumberedVersion.LATEST; - beforeAll(async () => { + beforeEach(async () => { + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); + }); + + afterEach(async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); }); @@ -107,6 +111,12 @@ describe('DatasetsRepository', () => { expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID); }); + test('should return dataset when it is deaccessioned, includeDeaccessioned param is set, and user is unauthenticated', async () => { + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, undefined); + const actual = await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, true); + expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID); + }); + test('should return error when 
dataset is deaccessioned and includeDeaccessioned param is not set', async () => { let error: ReadError = undefined; await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, false).catch((e) => (error = e)); From fcd6e7344a72d8363997f1ea441fcaefea1f0901 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 10 Jan 2024 13:30:31 +0000 Subject: [PATCH 04/96] Stash: CreateDataset use case WIP. Pending validation and data access logic --- .../validators/NewResourceValidator.ts | 5 ++ .../validators/errors/EmptyFieldError.ts | 7 ++ .../validators/errors/FieldValidationError.ts | 11 +++ .../errors/ResourceValidationError.ts | 5 ++ src/datasets/domain/models/NewDataset.ts | 9 +++ .../repositories/IDatasetsRepository.ts | 2 + src/datasets/domain/useCases/CreateDataset.ts | 20 +++++ .../validators/NewDatasetValidator.ts | 27 +++++++ .../infra/repositories/DatasetsRepository.ts | 5 ++ .../domain/models/MetadataBlock.ts | 3 + .../transformers/metadataBlockTransformers.ts | 3 + test/testHelpers/datasets/newDatasetHelper.ts | 29 +++++++ .../metadataBlocks/metadataBlockHelper.ts | 16 ++++ test/unit/datasets/CreateDataset.test.ts | 76 +++++++++++++++++++ 14 files changed, 218 insertions(+) create mode 100644 src/core/domain/useCases/validators/NewResourceValidator.ts create mode 100644 src/core/domain/useCases/validators/errors/EmptyFieldError.ts create mode 100644 src/core/domain/useCases/validators/errors/FieldValidationError.ts create mode 100644 src/core/domain/useCases/validators/errors/ResourceValidationError.ts create mode 100644 src/datasets/domain/models/NewDataset.ts create mode 100644 src/datasets/domain/useCases/CreateDataset.ts create mode 100644 src/datasets/domain/useCases/validators/NewDatasetValidator.ts create mode 100644 test/testHelpers/datasets/newDatasetHelper.ts create mode 100644 test/unit/datasets/CreateDataset.test.ts diff --git a/src/core/domain/useCases/validators/NewResourceValidator.ts 
b/src/core/domain/useCases/validators/NewResourceValidator.ts new file mode 100644 index 00000000..a816bd07 --- /dev/null +++ b/src/core/domain/useCases/validators/NewResourceValidator.ts @@ -0,0 +1,5 @@ +import { ResourceValidationError } from './errors/ResourceValidationError'; + +export interface NewResourceValidator { + validate(resource: T): Promise; +} diff --git a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts new file mode 100644 index 00000000..c098508c --- /dev/null +++ b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts @@ -0,0 +1,7 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class EmptyFieldError extends FieldValidationError { + constructor(field: string) { + super(field, 'The field should not be empty.'); + } +} diff --git a/src/core/domain/useCases/validators/errors/FieldValidationError.ts b/src/core/domain/useCases/validators/errors/FieldValidationError.ts new file mode 100644 index 00000000..4c58e0bf --- /dev/null +++ b/src/core/domain/useCases/validators/errors/FieldValidationError.ts @@ -0,0 +1,11 @@ +import { ResourceValidationError } from './ResourceValidationError'; + +export class FieldValidationError extends ResourceValidationError { + constructor(field: string, reason?: string) { + let message = `There was an error when validating the field ${field}.`; + if (reason) { + message += ` Reason was: ${reason}`; + } + super(message); + } +} diff --git a/src/core/domain/useCases/validators/errors/ResourceValidationError.ts b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts new file mode 100644 index 00000000..4d4f7cbb --- /dev/null +++ b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts @@ -0,0 +1,5 @@ +export class ResourceValidationError extends Error { + constructor(message: string) { + super(message); + } +} diff --git a/src/datasets/domain/models/NewDataset.ts 
b/src/datasets/domain/models/NewDataset.ts new file mode 100644 index 00000000..5321ae32 --- /dev/null +++ b/src/datasets/domain/models/NewDataset.ts @@ -0,0 +1,9 @@ +import { Author, DatasetContact, DatasetDescription } from './Dataset'; + +export interface NewDataset { + title: string; + authors: Author[]; + contacts: DatasetContact[]; + descriptions: DatasetDescription[]; + subjects: string[]; +} diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index bc16c3f7..698fc261 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -2,6 +2,7 @@ import { Dataset } from '../models/Dataset'; import { DatasetUserPermissions } from '../models/DatasetUserPermissions'; import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; +import { NewDataset } from '../models/NewDataset'; export interface IDatasetsRepository { getDatasetSummaryFieldNames(): Promise; @@ -12,4 +13,5 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; + createDataset(newDataset: NewDataset): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts new file mode 100644 index 00000000..77b188af --- /dev/null +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -0,0 +1,20 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase'; +import { IDatasetsRepository } from '../repositories/IDatasetsRepository'; +import { NewDataset } from '../models/NewDataset'; +import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; + +export class CreateDataset implements UseCase { + private datasetsRepository: 
IDatasetsRepository; + private newDatasetValidator: NewResourceValidator; + + constructor(datasetsRepository: IDatasetsRepository, newDatasetValidator: NewResourceValidator) { + this.datasetsRepository = datasetsRepository; + this.newDatasetValidator = newDatasetValidator; + } + + async execute(newDataset: NewDataset): Promise { + return await this.newDatasetValidator.validate(newDataset).then(async () => { + return await this.datasetsRepository.createDataset(newDataset); + }); + } +} diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts new file mode 100644 index 00000000..73678457 --- /dev/null +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -0,0 +1,27 @@ +import { NewDataset } from '../../models/NewDataset'; +import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; +import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; +import { MetadataBlock } from '../../../../metadataBlocks'; +import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; + +export class NewDatasetValidator implements NewResourceValidator { + private metadataBlockRepository: IMetadataBlocksRepository; + + constructor(metadataBlockRepository: IMetadataBlocksRepository) { + this.metadataBlockRepository = metadataBlockRepository; + } + + async validate(resource: NewDataset): Promise { + console.log(resource); + return await this.metadataBlockRepository + .getMetadataBlockByName('citation') + .then((citationMetadataBlock: MetadataBlock) => { + console.log(citationMetadataBlock); + // TODO apply validation based on citation metadata block info + // missing field -> throw + }) + .catch((error) => { + throw new ResourceValidationError(error); + }); + } +} diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts 
b/src/datasets/infra/repositories/DatasetsRepository.ts index 541f1698..aae73f21 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -8,6 +8,7 @@ import { DatasetLock } from '../../domain/models/DatasetLock'; import { transformDatasetLocksResponseToDatasetLocks } from './transformers/datasetLocksTransformers'; import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transformers/datasetPreviewsTransformers'; import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; +import { NewDataset } from '../../domain/models/NewDataset'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -106,4 +107,8 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi throw error; }); } + + public async createDataset(newDataset: NewDataset): Promise { + console.log(newDataset); + } } diff --git a/src/metadataBlocks/domain/models/MetadataBlock.ts b/src/metadataBlocks/domain/models/MetadataBlock.ts index b95bf799..dc60b5b1 100644 --- a/src/metadataBlocks/domain/models/MetadataBlock.ts +++ b/src/metadataBlocks/domain/models/MetadataBlock.ts @@ -14,6 +14,9 @@ export interface MetadataFieldInfo { description: string; multiple: boolean; isControlledVocabulary: boolean; + controlledVocabularyValues?: string[]; displayFormat: string; childMetadataFields?: Record; + isRequired: boolean; + displayOrder: number; } diff --git a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts index 9ed9c1ba..35685156 100644 --- a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts +++ b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts @@ -31,6 +31,9 @@ const transformPayloadMetadataFieldInfo = ( multiple: metadataFieldInfoPayload.multiple, isControlledVocabulary: 
metadataFieldInfoPayload.isControlledVocabulary, displayFormat: metadataFieldInfoPayload.displayFormat, + // TODO + isRequired: true, + displayOrder: 0, }; if (!isChild && metadataFieldInfoPayload.hasOwnProperty('childFields')) { const childMetadataFieldsPayload = metadataFieldInfoPayload.childFields; diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts new file mode 100644 index 00000000..53f64ba3 --- /dev/null +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -0,0 +1,29 @@ +import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; + +export const createNewDatasetModel = (): NewDataset => { + return { + title: 'test', + authors: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ], + subjects: ['Subject1', 'Subject2'], + contacts: [ + { + datasetContactName: 'Admin, Dataverse', + datasetContactEmail: 'someemail@test.com', + }, + ], + descriptions: [ + { + dsDescriptionValue: 'test', + }, + ], + }; +}; diff --git a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts index 54b12cfe..4ff63661 100644 --- a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts +++ b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts @@ -16,6 +16,8 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, testField2: { name: 'testName2', @@ -27,6 +29,8 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, + displayOrder: 0, childMetadataFields: { testField3: { name: 'testName3', @@ -38,6 +42,8 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, 
displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, testField4: { name: 'testName4', @@ -49,6 +55,8 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, }, }, @@ -72,6 +80,8 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, testField2: { name: 'testName2', @@ -83,6 +93,8 @@ export const createMetadataBlockPayload = (): any => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, + displayOrder: 0, childFields: { testField3: { name: 'testName3', @@ -94,6 +106,8 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, testField4: { name: 'testName4', @@ -105,6 +119,8 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, }, }, }, diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts new file mode 100644 index 00000000..f0b7ec2a --- /dev/null +++ b/test/unit/datasets/CreateDataset.test.ts @@ -0,0 +1,76 @@ +import { CreateDataset } from '../../../src/datasets/domain/useCases/CreateDataset'; +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository'; +import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; +import { createNewDatasetModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { ResourceValidationError } from 
'../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; +import { WriteError } from '../../../src'; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + const testDataset = createNewDatasetModel(); + + afterEach(() => { + sandbox.restore(); + }); + + test('should call repository when validation is successful', async () => { + const datasetsRepositoryStub = {}; + const createDatasetStub = sandbox.stub(); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorMock = >{}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorMock.validate = validateMock; + + const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorMock); + + await sut.execute(testDataset); + + assert.calledWithExactly(validateMock, testDataset); + assert.calledWithExactly(createDatasetStub, testDataset); + + assert.callOrder(validateMock, createDatasetStub); + }); + + test('should throw ResourceValidationError and not call repository when validation is unsuccessful', async () => { + const datasetsRepositoryStub = {}; + const createDatasetStub = sandbox.stub(); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorMock = >{}; + const testValidationError = new ResourceValidationError('Test error'); + const validateMock = sandbox.stub().throwsException(testValidationError); + newDatasetValidatorMock.validate = validateMock; + + const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorMock); + let actualError: ResourceValidationError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testValidationError); + + assert.calledWithExactly(validateMock, testDataset); + assert.notCalled(createDatasetStub); + }); + + test('should throw WriteError when validation is successful and repository raises an error', async () => { + const datasetsRepositoryMock = {}; + const testWriteError = new 
WriteError('Test error'); + const createDatasetMock = sandbox.stub().throwsException(testWriteError); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorMock = >{}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorMock.validate = validateMock; + + const sut = new CreateDataset(datasetsRepositoryMock, newDatasetValidatorMock); + let actualError: ResourceValidationError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testWriteError); + + assert.calledWithExactly(validateMock, testDataset); + assert.calledWithExactly(createDatasetMock, testDataset); + + assert.callOrder(validateMock, createDatasetMock); + }); +}); From 3a24289cbec76992d60068652b351f9a4bf28a4f Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 10 Jan 2024 14:03:55 +0000 Subject: [PATCH 05/96] Changed: NewDataset model properties --- src/datasets/domain/models/NewDataset.ts | 17 +++++---- test/testHelpers/datasets/newDatasetHelper.ts | 35 ++++++++----------- 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/datasets/domain/models/NewDataset.ts b/src/datasets/domain/models/NewDataset.ts index 5321ae32..36b35c19 100644 --- a/src/datasets/domain/models/NewDataset.ts +++ b/src/datasets/domain/models/NewDataset.ts @@ -1,9 +1,14 @@ -import { Author, DatasetContact, DatasetDescription } from './Dataset'; +import { DatasetMetadataSubField } from './Dataset'; export interface NewDataset { - title: string; - authors: Author[]; - contacts: DatasetContact[]; - descriptions: DatasetDescription[]; - subjects: string[]; + metadataBlockValues: NewDatasetMetadataBlockValues[]; } + +export interface NewDatasetMetadataBlockValues { + name: string; + fields: NewDatasetMetadataFields; +} + +export type NewDatasetMetadataFields = Record; + +export type NewDatasetMetadataFieldValue = string | string[] | DatasetMetadataSubField | DatasetMetadataSubField[]; diff --git 
a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 53f64ba3..9811951b 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -2,27 +2,22 @@ import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; export const createNewDatasetModel = (): NewDataset => { return { - title: 'test', - authors: [ + metadataBlockValues: [ { - authorName: 'Admin, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - { - authorName: 'Owner, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - ], - subjects: ['Subject1', 'Subject2'], - contacts: [ - { - datasetContactName: 'Admin, Dataverse', - datasetContactEmail: 'someemail@test.com', - }, - ], - descriptions: [ - { - dsDescriptionValue: 'test', + name: 'citation', + fields: { + title: 'test dataset', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ], + }, }, ], }; From 644f228b788cd7334c611533b88b08035a64f205 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 12 Jan 2024 09:32:30 +0000 Subject: [PATCH 06/96] Stash: NewDatasetValidator logic WIP --- .../validators/errors/EmptyFieldError.ts | 4 +- .../validators/errors/FieldValidationError.ts | 28 +++++++- src/datasets/domain/models/NewDataset.ts | 10 ++- .../validators/NewDatasetValidator.ts | 38 +++++++---- test/integration/environment/.env | 4 +- test/testHelpers/datasets/newDatasetHelper.ts | 65 +++++++++++++++++++ test/unit/datasets/CreateDataset.test.ts | 38 +++++------ .../unit/datasets/NewDatasetValidator.test.ts | 24 +++++++ 8 files changed, 169 insertions(+), 42 deletions(-) create mode 100644 test/unit/datasets/NewDatasetValidator.test.ts diff --git a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts index c098508c..52c3ae80 100644 --- 
a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts +++ b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts @@ -1,7 +1,7 @@ import { FieldValidationError } from './FieldValidationError'; export class EmptyFieldError extends FieldValidationError { - constructor(field: string) { - super(field, 'The field should not be empty.'); + constructor(metadataFieldName: string, citationBlockName: string, parentMetadataFieldName?: string) { + super(metadataFieldName, citationBlockName, parentMetadataFieldName, 'The field should not be empty.'); } } diff --git a/src/core/domain/useCases/validators/errors/FieldValidationError.ts b/src/core/domain/useCases/validators/errors/FieldValidationError.ts index 4c58e0bf..093b28a7 100644 --- a/src/core/domain/useCases/validators/errors/FieldValidationError.ts +++ b/src/core/domain/useCases/validators/errors/FieldValidationError.ts @@ -1,11 +1,33 @@ import { ResourceValidationError } from './ResourceValidationError'; export class FieldValidationError extends ResourceValidationError { - constructor(field: string, reason?: string) { - let message = `There was an error when validating the field ${field}.`; + private citationBlockName: string; + private metadataFieldName: string; + private parentMetadataFieldName?: string; + + constructor(metadataFieldName: string, citationBlockName: string, parentMetadataFieldName?: string, reason?: string) { + let message = `There was an error when validating the field ${metadataFieldName} from metadata block ${citationBlockName}`; + if (metadataFieldName) { + message += ` with parent field ${parentMetadataFieldName}`; + } if (reason) { - message += ` Reason was: ${reason}`; + message += `. 
Reason was: ${reason}`; } super(message); + this.citationBlockName = citationBlockName; + this.metadataFieldName = metadataFieldName; + this.parentMetadataFieldName = parentMetadataFieldName; + } + + getCitationBlockName(): string { + return this.citationBlockName; + } + + getMetadataFieldName(): string { + return this.metadataFieldName; + } + + getParentMetadataFieldName(): string | undefined { + return this.parentMetadataFieldName; } } diff --git a/src/datasets/domain/models/NewDataset.ts b/src/datasets/domain/models/NewDataset.ts index 36b35c19..d7e587ea 100644 --- a/src/datasets/domain/models/NewDataset.ts +++ b/src/datasets/domain/models/NewDataset.ts @@ -1,5 +1,3 @@ -import { DatasetMetadataSubField } from './Dataset'; - export interface NewDataset { metadataBlockValues: NewDatasetMetadataBlockValues[]; } @@ -11,4 +9,10 @@ export interface NewDatasetMetadataBlockValues { export type NewDatasetMetadataFields = Record; -export type NewDatasetMetadataFieldValue = string | string[] | DatasetMetadataSubField | DatasetMetadataSubField[]; +export type NewDatasetMetadataFieldValue = + | string + | string[] + | NewDatasetMetadataSubFieldValue + | NewDatasetMetadataSubFieldValue[]; + +export type NewDatasetMetadataSubFieldValue = Record; diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 73678457..0be7381d 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,8 +1,10 @@ -import { NewDataset } from '../../models/NewDataset'; +import { NewDataset, NewDatasetMetadataFieldValue } from '../../models/NewDataset'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; -import { MetadataBlock } from 
'../../../../metadataBlocks'; +import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; +import { WriteError } from '../../../../core'; +import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; @@ -12,16 +14,26 @@ export class NewDatasetValidator implements NewResourceValidator { } async validate(resource: NewDataset): Promise { - console.log(resource); - return await this.metadataBlockRepository - .getMetadataBlockByName('citation') - .then((citationMetadataBlock: MetadataBlock) => { - console.log(citationMetadataBlock); - // TODO apply validation based on citation metadata block info - // missing field -> throw - }) - .catch((error) => { - throw new ResourceValidationError(error); - }); + for (const metadataBlockValues of resource.metadataBlockValues) { + const newDatasetMetadataBlockName = metadataBlockValues.name; + await this.metadataBlockRepository + .getMetadataBlockByName(newDatasetMetadataBlockName) + .then((metadataBlock: MetadataBlock) => { + Object.keys(metadataBlock.metadataFields).map((metadataFieldKey) => { + const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; + const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = + metadataBlockValues.fields[metadataFieldKey]; + if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { + throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); + } + if (metadataFieldInfo.childMetadataFields != undefined) { + // TODO: child fields validation + } + }); + }) + .catch((error: WriteError) => { + throw new ResourceValidationError(error.message); + }); + } } } diff --git a/test/integration/environment/.env 
b/test/integration/environment/.env index 80e9a14e..2141e353 100644 --- a/test/integration/environment/.env +++ b/test/integration/environment/.env @@ -1,6 +1,6 @@ POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -DATAVERSE_IMAGE_REGISTRY=docker.io -DATAVERSE_IMAGE_TAG=unstable +DATAVERSE_IMAGE_REGISTRY=ghcr.io +DATAVERSE_IMAGE_TAG=10216-metadatablocks-payload-extension DATAVERSE_BOOTSTRAP_TIMEOUT=5m diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 9811951b..92b7c9c9 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -1,4 +1,5 @@ import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { MetadataBlock } from '../../../src'; export const createNewDatasetModel = (): NewDataset => { return { @@ -22,3 +23,67 @@ export const createNewDatasetModel = (): NewDataset => { ], }; }; + +export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { + return { + id: 1, + name: 'citation', + displayName: 'Citation Metadata', + metadataFields: { + title: { + name: 'title', + displayName: 'title', + title: 'title', + type: 'DatasetField', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + }, + author: { + name: 'author', + displayName: 'author', + title: 'author', + type: 'NONE', + watermark: 'watermark', + description: 'description', + multiple: true, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 1, + childMetadataFields: { + authorName: { + name: 'authorName', + displayName: 'author name', + title: 'author name', + type: 'TEXT', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 2, + }, + authorAffiliation: { 
+ name: 'authorAffiliation', + displayName: 'author affiliation', + title: 'author affiliation', + type: 'TEXT', + watermark: 'watermark', + description: 'descriprion', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: false, + displayOrder: 3, + }, + }, + }, + }, + }; +}; diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index f0b7ec2a..78897bd6 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -20,18 +20,18 @@ describe('execute', () => { const createDatasetStub = sandbox.stub(); datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorMock = >{}; - const validateMock = sandbox.stub().resolves(); - newDatasetValidatorMock.validate = validateMock; + const newDatasetValidatorStub = >{}; + const validateStub = sandbox.stub().resolves(); + newDatasetValidatorStub.validate = validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorMock); + const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); await sut.execute(testDataset); - assert.calledWithExactly(validateMock, testDataset); + assert.calledWithExactly(validateStub, testDataset); assert.calledWithExactly(createDatasetStub, testDataset); - assert.callOrder(validateMock, createDatasetStub); + assert.callOrder(validateStub, createDatasetStub); }); test('should throw ResourceValidationError and not call repository when validation is unsuccessful', async () => { @@ -39,38 +39,38 @@ describe('execute', () => { const createDatasetStub = sandbox.stub(); datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorMock = >{}; + const newDatasetValidatorStub = >{}; const testValidationError = new ResourceValidationError('Test error'); - const validateMock = sandbox.stub().throwsException(testValidationError); - newDatasetValidatorMock.validate = validateMock; + const 
validateStub = sandbox.stub().throwsException(testValidationError); + newDatasetValidatorStub.validate = validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorMock); + const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); let actualError: ResourceValidationError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testValidationError); - assert.calledWithExactly(validateMock, testDataset); + assert.calledWithExactly(validateStub, testDataset); assert.notCalled(createDatasetStub); }); test('should throw WriteError when validation is successful and repository raises an error', async () => { - const datasetsRepositoryMock = {}; + const datasetsRepositoryStub = {}; const testWriteError = new WriteError('Test error'); - const createDatasetMock = sandbox.stub().throwsException(testWriteError); - datasetsRepositoryMock.createDataset = createDatasetMock; + const createDatasetStub = sandbox.stub().throwsException(testWriteError); + datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorMock = >{}; + const newDatasetValidatorStub = >{}; const validateMock = sandbox.stub().resolves(); - newDatasetValidatorMock.validate = validateMock; + newDatasetValidatorStub.validate = validateMock; - const sut = new CreateDataset(datasetsRepositoryMock, newDatasetValidatorMock); + const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); let actualError: ResourceValidationError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testWriteError); assert.calledWithExactly(validateMock, testDataset); - assert.calledWithExactly(createDatasetMock, testDataset); + assert.calledWithExactly(createDatasetStub, testDataset); - assert.callOrder(validateMock, createDatasetMock); + assert.callOrder(validateMock, createDatasetStub); }); }); diff --git 
a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts new file mode 100644 index 00000000..0f3331c5 --- /dev/null +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -0,0 +1,24 @@ +import { NewDatasetValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetValidator'; +import { createSandbox, SinonSandbox } from 'sinon'; +import { createNewDatasetModel, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { fail } from 'assert'; +import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + + afterEach(() => { + sandbox.restore(); + }); + + test('should not raise exception when new dataset is valid', async () => { + const testNewDataset = createNewDatasetModel(); + const testMetadataBlock = createNewDatasetMetadataBlockModel(); + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + + await sut.validate(testNewDataset).catch((e) => fail(e)); + }); +}); From 2c356338f94810ca3b420610b12c92f5c51e4b9b Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 12 Jan 2024 12:31:05 +0000 Subject: [PATCH 07/96] Added: test cases for NewDatasetValidator empty field error handling --- .../validators/errors/FieldValidationError.ts | 20 +++-------- .../validators/NewDatasetValidator.ts | 35 ++++++++---------- test/testHelpers/datasets/newDatasetHelper.ts | 13 +++++++ .../unit/datasets/NewDatasetValidator.test.ts | 36 +++++++++++++++++-- 4 files changed, 65 insertions(+), 39 deletions(-) diff --git a/src/core/domain/useCases/validators/errors/FieldValidationError.ts 
b/src/core/domain/useCases/validators/errors/FieldValidationError.ts index 093b28a7..ff1c830b 100644 --- a/src/core/domain/useCases/validators/errors/FieldValidationError.ts +++ b/src/core/domain/useCases/validators/errors/FieldValidationError.ts @@ -1,13 +1,13 @@ import { ResourceValidationError } from './ResourceValidationError'; export class FieldValidationError extends ResourceValidationError { - private citationBlockName: string; - private metadataFieldName: string; - private parentMetadataFieldName?: string; + citationBlockName: string; + metadataFieldName: string; + parentMetadataFieldName?: string; constructor(metadataFieldName: string, citationBlockName: string, parentMetadataFieldName?: string, reason?: string) { let message = `There was an error when validating the field ${metadataFieldName} from metadata block ${citationBlockName}`; - if (metadataFieldName) { + if (parentMetadataFieldName) { message += ` with parent field ${parentMetadataFieldName}`; } if (reason) { @@ -18,16 +18,4 @@ export class FieldValidationError extends ResourceValidationError { this.metadataFieldName = metadataFieldName; this.parentMetadataFieldName = parentMetadataFieldName; } - - getCitationBlockName(): string { - return this.citationBlockName; - } - - getMetadataFieldName(): string { - return this.metadataFieldName; - } - - getParentMetadataFieldName(): string | undefined { - return this.parentMetadataFieldName; - } } diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 0be7381d..9c5fd433 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,9 +1,8 @@ import { NewDataset, NewDatasetMetadataFieldValue } from '../../models/NewDataset'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from 
'../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; -import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; +import { MetadataFieldInfo } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; -import { WriteError } from '../../../../core'; import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; export class NewDatasetValidator implements NewResourceValidator { @@ -16,24 +15,20 @@ export class NewDatasetValidator implements NewResourceValidator { async validate(resource: NewDataset): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { const newDatasetMetadataBlockName = metadataBlockValues.name; - await this.metadataBlockRepository - .getMetadataBlockByName(newDatasetMetadataBlockName) - .then((metadataBlock: MetadataBlock) => { - Object.keys(metadataBlock.metadataFields).map((metadataFieldKey) => { - const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; - const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = - metadataBlockValues.fields[metadataFieldKey]; - if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { - throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); - } - if (metadataFieldInfo.childMetadataFields != undefined) { - // TODO: child fields validation - } - }); - }) - .catch((error: WriteError) => { - throw new ResourceValidationError(error.message); - }); + + const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(newDatasetMetadataBlockName); + for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { + const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; + const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = 
metadataBlockValues.fields[metadataFieldKey]; + + if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { + throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); + } + + if (metadataFieldInfo.childMetadataFields != undefined) { + // TODO: child fields validation + } + } } } } diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 92b7c9c9..2cf5a77f 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -24,6 +24,19 @@ export const createNewDatasetModel = (): NewDataset => { }; }; +export const createNewDatasetModelWithoutRequiredField = (): NewDataset => { + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + }, + }, + ], + }; +}; + export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { return { id: 1, diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index 0f3331c5..d029a50c 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -1,8 +1,13 @@ import { NewDatasetValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetValidator'; -import { createSandbox, SinonSandbox } from 'sinon'; -import { createNewDatasetModel, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { + createNewDatasetModel, + createNewDatasetMetadataBlockModel, + createNewDatasetModelWithoutRequiredField, +} from '../../testHelpers/datasets/newDatasetHelper'; import { fail } from 'assert'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; +import { EmptyFieldError } from '../../../src/core/domain/useCases/validators/errors/EmptyFieldError'; describe('execute', () => 
{ const sandbox: SinonSandbox = createSandbox(); @@ -11,7 +16,7 @@ describe('execute', () => { sandbox.restore(); }); - test('should not raise exception when new dataset is valid', async () => { + test('should not raise validation error when new dataset is valid', async () => { const testNewDataset = createNewDatasetModel(); const testMetadataBlock = createNewDatasetMetadataBlockModel(); const metadataBlocksRepositoryStub = {}; @@ -21,4 +26,29 @@ describe('execute', () => { await sut.validate(testNewDataset).catch((e) => fail(e)); }); + + test('should raise empty field error when a first level field is missing', async () => { + const testNewDataset = createNewDatasetModelWithoutRequiredField(); + const testMetadataBlock = createNewDatasetMetadataBlockModel(); + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + + await sut + .validate(testNewDataset) + .then(() => { + fail('Validation should fail'); + }) + .catch((error) => { + const emptyFieldError = error as EmptyFieldError; + assert.match(emptyFieldError.citationBlockName, 'citation'); + assert.match(emptyFieldError.metadataFieldName, 'author'); + assert.match(emptyFieldError.parentMetadataFieldName, undefined); + assert.match( + emptyFieldError.message, + 'There was an error when validating the field author from metadata block citation. 
Reason was: The field should not be empty.', + ); + }); + }); }); From 1b4b29a9160f6b8f396576ecea0bfb9b99f14fcb Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 12 Jan 2024 14:23:29 +0000 Subject: [PATCH 08/96] Stash: multiple field value validation WIP --- .../validators/NewDatasetValidator.ts | 69 +++++++++++++++++++ test/testHelpers/datasets/newDatasetHelper.ts | 49 +++++++++---- .../unit/datasets/NewDatasetValidator.test.ts | 33 ++++++++- 3 files changed, 135 insertions(+), 16 deletions(-) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 9c5fd433..7ed745a2 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -4,6 +4,7 @@ import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/rep import { MetadataFieldInfo } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; +import { FieldValidationError } from '../../../../core/domain/useCases/validators/errors/FieldValidationError'; export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; @@ -25,10 +26,78 @@ export class NewDatasetValidator implements NewResourceValidator { throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); } + this.validateMetadataFieldValueType( + metadataFieldInfo, + metadataFieldKey, + newDatasetMetadataFieldValue, + newDatasetMetadataBlockName, + ); + if (metadataFieldInfo.childMetadataFields != undefined) { // TODO: child fields validation } } } } + + validateMetadataFieldValueType( + metadataFieldInfo: MetadataFieldInfo, + metadataFieldKey: string, + newDatasetMetadataFieldValue: 
NewDatasetMetadataFieldValue, + newDatasetMetadataBlockName: string, + ): void { + if (metadataFieldInfo.multiple) { + if (!Array.isArray(newDatasetMetadataFieldValue)) { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'Expecting an array of values.', + ); + } + if (this.isValidArrayType(newDatasetMetadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'Expecting an array of sub fields, not strings.', + ); + } else if (this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'Expecting an array of strings, not sub fields.', + ); + } else if ( + !this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && + !this.isValidArrayType(newDatasetMetadataFieldValue, 'string') + ) { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'The provided array of values is not valid.', + ); + } + } + } + + private isValidArrayType( + newDatasetMetadataFieldValue: Array, + expectedType: 'string' | 'object', + ): boolean { + return newDatasetMetadataFieldValue.every( + (item: string | NewDatasetMetadataFieldValue) => typeof item === expectedType, + ); + } + + private createValidationError( + metadataFieldKey: string, + newDatasetMetadataBlockName: string, + parentMetadataFieldName: string | undefined, + reason: string, + ): FieldValidationError { + return new FieldValidationError(metadataFieldKey, newDatasetMetadataBlockName, parentMetadataFieldName, reason); + } } diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 2cf5a77f..9d99eb3b 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -1,36 +1,59 
@@ -import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; import { MetadataBlock } from '../../../src'; -export const createNewDatasetModel = (): NewDataset => { +export const createNewDatasetModel = (authorFieldValue?: NewDatasetMetadataFieldValue | string): NewDataset => { + const validAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ]; + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + author: authorFieldValue !== undefined ? authorFieldValue : validAuthorFieldValue, + }, + }, + ], + }; +}; + +export const createNewDatasetModelWithoutFirstLevelRequiredField = (): NewDataset => { return { metadataBlockValues: [ { name: 'citation', fields: { title: 'test dataset', - author: [ - { - authorName: 'Admin, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - { - authorName: 'Owner, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - ], }, }, ], }; }; -export const createNewDatasetModelWithoutRequiredField = (): NewDataset => { +export const createNewDatasetModelWithoutSecondLevelRequiredField = (): NewDataset => { return { metadataBlockValues: [ { name: 'citation', fields: { title: 'test dataset', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ], }, }, ], diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index d029a50c..d0bba684 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -3,11 +3,12 @@ import { assert, createSandbox, SinonSandbox } from 'sinon'; import { createNewDatasetModel, createNewDatasetMetadataBlockModel, - 
createNewDatasetModelWithoutRequiredField, + createNewDatasetModelWithoutFirstLevelRequiredField, } from '../../testHelpers/datasets/newDatasetHelper'; import { fail } from 'assert'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { EmptyFieldError } from '../../../src/core/domain/useCases/validators/errors/EmptyFieldError'; +import { FieldValidationError } from '../../../src/core/domain/useCases/validators/errors/FieldValidationError'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); @@ -27,8 +28,8 @@ describe('execute', () => { await sut.validate(testNewDataset).catch((e) => fail(e)); }); - test('should raise empty field error when a first level field is missing', async () => { - const testNewDataset = createNewDatasetModelWithoutRequiredField(); + test('should raise an empty field error when a first level field is missing', async () => { + const testNewDataset = createNewDatasetModelWithoutFirstLevelRequiredField(); const testMetadataBlock = createNewDatasetMetadataBlockModel(); const metadataBlocksRepositoryStub = {}; const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); @@ -51,4 +52,30 @@ describe('execute', () => { ); }); }); + + test('should raise an error when the provided field value for a multiple field is a string', async () => { + const invalidAuthorFieldValue = 'invalidValue'; + const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); + const testMetadataBlock = createNewDatasetMetadataBlockModel(); + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + + await sut + .validate(testNewDataset) + .then(() => { + fail('Validation should fail'); + }) + .catch((error) => { + const emptyFieldError 
= error as FieldValidationError; + assert.match(emptyFieldError.citationBlockName, 'citation'); + assert.match(emptyFieldError.metadataFieldName, 'author'); + assert.match(emptyFieldError.parentMetadataFieldName, undefined); + assert.match( + emptyFieldError.message, + 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of values.', + ); + }); + }); }); From 32ce475b80e4927cff34aa9f82bfdcad3a6d8f31 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 12 Jan 2024 14:30:04 +0000 Subject: [PATCH 09/96] Refactor: NewDatasetValidator unit test --- .../unit/datasets/NewDatasetValidator.test.ts | 32 ++++++++----------- 1 file changed, 14 insertions(+), 18 deletions(-) diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index d0bba684..7816e5be 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -17,24 +17,24 @@ describe('execute', () => { sandbox.restore(); }); - test('should not raise validation error when new dataset is valid', async () => { - const testNewDataset = createNewDatasetModel(); + function setupMetadataBlocksRepositoryStub(): IMetadataBlocksRepository { const testMetadataBlock = createNewDatasetMetadataBlockModel(); const metadataBlocksRepositoryStub = {}; const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; - const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + return metadataBlocksRepositoryStub; + } + + test('should not raise validation error when new dataset is valid', async () => { + const testNewDataset = createNewDatasetModel(); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); await sut.validate(testNewDataset).catch((e) => fail(e)); }); test('should raise an empty field error when a first level field is missing', async () => 
{ const testNewDataset = createNewDatasetModelWithoutFirstLevelRequiredField(); - const testMetadataBlock = createNewDatasetMetadataBlockModel(); - const metadataBlocksRepositoryStub = {}; - const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); - metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; - const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); await sut .validate(testNewDataset) @@ -56,11 +56,7 @@ describe('execute', () => { test('should raise an error when the provided field value for a multiple field is a string', async () => { const invalidAuthorFieldValue = 'invalidValue'; const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); - const testMetadataBlock = createNewDatasetMetadataBlockModel(); - const metadataBlocksRepositoryStub = {}; - const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); - metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; - const sut = new NewDatasetValidator(metadataBlocksRepositoryStub); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); await sut .validate(testNewDataset) @@ -68,12 +64,12 @@ describe('execute', () => { fail('Validation should fail'); }) .catch((error) => { - const emptyFieldError = error as FieldValidationError; - assert.match(emptyFieldError.citationBlockName, 'citation'); - assert.match(emptyFieldError.metadataFieldName, 'author'); - assert.match(emptyFieldError.parentMetadataFieldName, undefined); + const fieldValidationError = error as FieldValidationError; + assert.match(fieldValidationError.citationBlockName, 'citation'); + assert.match(fieldValidationError.metadataFieldName, 'author'); + assert.match(fieldValidationError.parentMetadataFieldName, undefined); assert.match( - emptyFieldError.message, + fieldValidationError.message, 'There was an error when validating the 
field author from metadata block citation. Reason was: Expecting an array of values.', ); }); From 330689675ae0f6fc940a313972c604c06fd2bb78 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 15 Jan 2024 11:01:25 +0000 Subject: [PATCH 10/96] Added: test case to NewDatasetValidator and refactor --- .../unit/datasets/NewDatasetValidator.test.ts | 70 ++++++++++--------- 1 file changed, 36 insertions(+), 34 deletions(-) diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index 7816e5be..ed402972 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -9,6 +9,7 @@ import { fail } from 'assert'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { EmptyFieldError } from '../../../src/core/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../src/core/domain/useCases/validators/errors/FieldValidationError'; +import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); @@ -25,6 +26,25 @@ describe('execute', () => { return metadataBlocksRepositoryStub; } + async function runValidateExpectingFieldValidationError( + newDataset: NewDataset, + expectedErrorMessage: string, + ): Promise { + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + await sut + .validate(newDataset) + .then(() => { + fail('Validation should fail'); + }) + .catch((error) => { + const fieldValidationError = error as T; + assert.match(fieldValidationError.citationBlockName, 'citation'); + assert.match(fieldValidationError.metadataFieldName, 'author'); + assert.match(fieldValidationError.parentMetadataFieldName, undefined); + assert.match(fieldValidationError.message, expectedErrorMessage); + }); + } + test('should not raise validation error when new dataset is 
valid', async () => { const testNewDataset = createNewDatasetModel(); const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); @@ -33,45 +53,27 @@ describe('execute', () => { }); test('should raise an empty field error when a first level field is missing', async () => { - const testNewDataset = createNewDatasetModelWithoutFirstLevelRequiredField(); - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); - - await sut - .validate(testNewDataset) - .then(() => { - fail('Validation should fail'); - }) - .catch((error) => { - const emptyFieldError = error as EmptyFieldError; - assert.match(emptyFieldError.citationBlockName, 'citation'); - assert.match(emptyFieldError.metadataFieldName, 'author'); - assert.match(emptyFieldError.parentMetadataFieldName, undefined); - assert.match( - emptyFieldError.message, - 'There was an error when validating the field author from metadata block citation. Reason was: The field should not be empty.', - ); - }); + await runValidateExpectingFieldValidationError( + createNewDatasetModelWithoutFirstLevelRequiredField(), + 'There was an error when validating the field author from metadata block citation. Reason was: The field should not be empty.', + ); }); test('should raise an error when the provided field value for a multiple field is a string', async () => { const invalidAuthorFieldValue = 'invalidValue'; const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'There was an error when validating the field author from metadata block citation. 
Reason was: Expecting an array of values.', + ); + }); - await sut - .validate(testNewDataset) - .then(() => { - fail('Validation should fail'); - }) - .catch((error) => { - const fieldValidationError = error as FieldValidationError; - assert.match(fieldValidationError.citationBlockName, 'citation'); - assert.match(fieldValidationError.metadataFieldName, 'author'); - assert.match(fieldValidationError.parentMetadataFieldName, undefined); - assert.match( - fieldValidationError.message, - 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of values.', - ); - }); + test('should raise an error when the provided field value is an array of strings and the field expects an array of objects', async () => { + const invalidAuthorFieldValue = ['invalidValue1', 'invalidValue2']; + const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'There was an error when validating the field author from metadata block citation. 
Reason was: Expecting an array of sub fields, not strings', + ); }); }); From fa3a6c3918bf61d22cf8b801bb63fc6425b48e74 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 15 Jan 2024 11:16:28 +0000 Subject: [PATCH 11/96] Added: NewDatasetValidator test case for field type error --- test/testHelpers/datasets/newDatasetHelper.ts | 20 ++++++++++++++- .../unit/datasets/NewDatasetValidator.test.ts | 25 ++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 9d99eb3b..0cf77d79 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -1,7 +1,10 @@ import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; import { MetadataBlock } from '../../../src'; -export const createNewDatasetModel = (authorFieldValue?: NewDatasetMetadataFieldValue | string): NewDataset => { +export const createNewDatasetModel = ( + authorFieldValue?: NewDatasetMetadataFieldValue | string, + alternativeTitleValue?: NewDatasetMetadataFieldValue | string, +): NewDataset => { const validAuthorFieldValue = [ { authorName: 'Admin, Dataverse', @@ -12,6 +15,7 @@ export const createNewDatasetModel = (authorFieldValue?: NewDatasetMetadataField authorAffiliation: 'Dataverse.org', }, ]; + const validAlternativeTitleValue = ['alternative1', 'alternative2']; return { metadataBlockValues: [ { @@ -19,6 +23,7 @@ export const createNewDatasetModel = (authorFieldValue?: NewDatasetMetadataField fields: { title: 'test dataset', author: authorFieldValue !== undefined ? authorFieldValue : validAuthorFieldValue, + alternativeTitle: alternativeTitleValue !== undefined ? 
alternativeTitleValue : validAlternativeTitleValue, }, }, ], @@ -120,6 +125,19 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { }, }, }, + alternativeTitle: { + name: 'alternativeTitle', + displayName: 'Alternative Title', + title: 'Alternative Title', + type: 'TEXT', + watermark: '', + description: 'Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title', + multiple: true, + isControlledVocabulary: false, + displayFormat: '', + isRequired: true, + displayOrder: 4, + }, }, }; }; diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index ed402972..a67b3065 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -28,6 +28,7 @@ describe('execute', () => { async function runValidateExpectingFieldValidationError( newDataset: NewDataset, + expectedMetadataFieldName: string, expectedErrorMessage: string, ): Promise { const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); @@ -39,7 +40,7 @@ describe('execute', () => { .catch((error) => { const fieldValidationError = error as T; assert.match(fieldValidationError.citationBlockName, 'citation'); - assert.match(fieldValidationError.metadataFieldName, 'author'); + assert.match(fieldValidationError.metadataFieldName, expectedMetadataFieldName); assert.match(fieldValidationError.parentMetadataFieldName, undefined); assert.match(fieldValidationError.message, expectedErrorMessage); }); @@ -55,6 +56,7 @@ describe('execute', () => { test('should raise an empty field error when a first level field is missing', async () => { await runValidateExpectingFieldValidationError( createNewDatasetModelWithoutFirstLevelRequiredField(), + 'author', 'There was an error when validating the field author from metadata block citation. 
Reason was: The field should not be empty.', ); }); @@ -64,6 +66,7 @@ describe('execute', () => { const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, + 'author', 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of values.', ); }); @@ -73,7 +76,27 @@ describe('execute', () => { const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, + 'author', 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of sub fields, not strings', ); }); + + test('should raise an error when the provided field value is an array of objects and the field expects an array of strings', async () => { + const invalidAlternativeTitleFieldValue = [ + { + invalidSubfield1: 'invalid value 1', + invalidSubfield2: 'invalid value 2', + }, + { + invalidSubfield1: 'invalid value 1', + invalidSubfield2: 'invalid value 2', + }, + ]; + const testNewDataset = createNewDatasetModel(undefined, invalidAlternativeTitleFieldValue); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'alternativeTitle', + 'There was an error when validating the field alternativeTitle from metadata block citation. 
Reason was: Expecting an array of strings, not sub fields', + ); + }); }); From 77651142a851108c74008acfa029c85fb6069f1c Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 15 Jan 2024 12:41:00 +0000 Subject: [PATCH 12/96] Added: not multiple field validation logic to NewDatasetValidator --- .../validators/NewDatasetValidator.ts | 36 ++++++++++++++++--- test/testHelpers/datasets/newDatasetHelper.ts | 4 ++- .../unit/datasets/NewDatasetValidator.test.ts | 29 +++++++++++++-- 3 files changed, 60 insertions(+), 9 deletions(-) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 7ed745a2..b98c89de 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -22,10 +22,6 @@ export class NewDatasetValidator implements NewResourceValidator { const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = metadataBlockValues.fields[metadataFieldKey]; - if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { - throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); - } - this.validateMetadataFieldValueType( metadataFieldInfo, metadataFieldKey, @@ -35,17 +31,30 @@ export class NewDatasetValidator implements NewResourceValidator { if (metadataFieldInfo.childMetadataFields != undefined) { // TODO: child fields validation + /* for (const childMetadataFieldKey of Object.keys(metadataFieldInfo.childMetadataFields)) { + const childMetadataFieldInfo: MetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + const newDatasetChildMetadataFieldValue: NewDatasetMetadataSubFieldValue | NewDatasetMetadataSubFieldValue[] = newDatasetMetadataFieldValue[childMetadataFieldKey] as NewDatasetMetadataSubFieldValue | string + 
this.validateMetadataFieldValueType( + childMetadataFieldInfo, + childMetadataFieldKey, + newDatasetMetadataFieldValue, + newDatasetMetadataBlockName, + ); + } */ } } } } - validateMetadataFieldValueType( + private validateMetadataFieldValueType( metadataFieldInfo: MetadataFieldInfo, metadataFieldKey: string, newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, newDatasetMetadataBlockName: string, ): void { + if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { + throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); + } if (metadataFieldInfo.multiple) { if (!Array.isArray(newDatasetMetadataFieldValue)) { throw this.createValidationError( @@ -80,6 +89,23 @@ export class NewDatasetValidator implements NewResourceValidator { 'The provided array of values is not valid.', ); } + } else { + if (Array.isArray(newDatasetMetadataFieldValue)) { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'Expecting a single field, not an array.', + ); + } + if (typeof newDatasetMetadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + undefined, + 'Expecting a string, not sub fields.', + ); + } } } diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 0cf77d79..fc6298b5 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -2,9 +2,11 @@ import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/ import { MetadataBlock } from '../../../src'; export const createNewDatasetModel = ( + titleFieldValue?: NewDatasetMetadataFieldValue | string, authorFieldValue?: NewDatasetMetadataFieldValue | string, alternativeTitleValue?: NewDatasetMetadataFieldValue | string, ): NewDataset => { + const validTitle = 'test dataset'; const validAuthorFieldValue = [ 
{ authorName: 'Admin, Dataverse', @@ -21,7 +23,7 @@ export const createNewDatasetModel = ( { name: 'citation', fields: { - title: 'test dataset', + title: titleFieldValue !== undefined ? titleFieldValue : validTitle, author: authorFieldValue !== undefined ? authorFieldValue : validAuthorFieldValue, alternativeTitle: alternativeTitleValue !== undefined ? alternativeTitleValue : validAlternativeTitleValue, }, diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index a67b3065..b0c69bba 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -61,9 +61,32 @@ describe('execute', () => { ); }); + test('should raise an error when the provided field value for an unique field is an array', async () => { + const invalidTitleFieldValue = ['title1', 'title2']; + const testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a single field, not an array.', + ); + }); + + test('should raise an error when the provided field value is an object and the field expects a string', async () => { + const invalidTitleFieldValue = { + invalidSubfield1: 'invalid value 1', + invalidSubfield2: 'invalid value 2', + }; + const testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. 
Reason was: Expecting a string, not sub fields.', + ); + }); + test('should raise an error when the provided field value for a multiple field is a string', async () => { const invalidAuthorFieldValue = 'invalidValue'; - const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); + const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'author', @@ -73,7 +96,7 @@ describe('execute', () => { test('should raise an error when the provided field value is an array of strings and the field expects an array of objects', async () => { const invalidAuthorFieldValue = ['invalidValue1', 'invalidValue2']; - const testNewDataset = createNewDatasetModel(invalidAuthorFieldValue); + const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'author', @@ -92,7 +115,7 @@ describe('execute', () => { invalidSubfield2: 'invalid value 2', }, ]; - const testNewDataset = createNewDatasetModel(undefined, invalidAlternativeTitleFieldValue); + const testNewDataset = createNewDatasetModel(undefined, undefined, invalidAlternativeTitleFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, 'alternativeTitle', From 3deaac7aa107a926da8d3df1613735212966e425 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 08:48:32 +0000 Subject: [PATCH 13/96] Added: child field validation logic and refactor to NewDatasetValidator --- src/datasets/domain/models/NewDataset.ts | 6 +- .../validators/NewDatasetValidator.ts | 206 ++++++++++++------ .../unit/datasets/NewDatasetValidator.test.ts | 40 +++- 3 files changed, 176 insertions(+), 76 deletions(-) diff --git a/src/datasets/domain/models/NewDataset.ts b/src/datasets/domain/models/NewDataset.ts index d7e587ea..eaa0bd73 100644 --- a/src/datasets/domain/models/NewDataset.ts +++ b/src/datasets/domain/models/NewDataset.ts @@ 
-12,7 +12,7 @@ export type NewDatasetMetadataFields = Record; +export type NewDatasetMetadataChildFieldValue = Record; diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index b98c89de..91ba42c4 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,4 +1,4 @@ -import { NewDataset, NewDatasetMetadataFieldValue } from '../../models/NewDataset'; +import { NewDataset, NewDatasetMetadataFieldValue, NewDatasetMetadataChildFieldValue } from '../../models/NewDataset'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { MetadataFieldInfo } from '../../../../metadataBlocks'; @@ -22,7 +22,7 @@ export class NewDatasetValidator implements NewResourceValidator { const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = metadataBlockValues.fields[metadataFieldKey]; - this.validateMetadataFieldValueType( + this.validateMetadataFieldValue( metadataFieldInfo, metadataFieldKey, newDatasetMetadataFieldValue, @@ -30,82 +30,162 @@ export class NewDatasetValidator implements NewResourceValidator { ); if (metadataFieldInfo.childMetadataFields != undefined) { - // TODO: child fields validation - /* for (const childMetadataFieldKey of Object.keys(metadataFieldInfo.childMetadataFields)) { - const childMetadataFieldInfo: MetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - const newDatasetChildMetadataFieldValue: NewDatasetMetadataSubFieldValue | NewDatasetMetadataSubFieldValue[] = newDatasetMetadataFieldValue[childMetadataFieldKey] as NewDatasetMetadataSubFieldValue | string - 
this.validateMetadataFieldValueType( - childMetadataFieldInfo, - childMetadataFieldKey, - newDatasetMetadataFieldValue, + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + if (metadataFieldInfo.multiple) { + const newDatasetMetadataFieldChildFieldValues = + newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue[]; + for (const metadataChildFieldValue of newDatasetMetadataFieldChildFieldValues) { + this.validateChildMetadataFieldValues( + childMetadataFieldKeys, + metadataFieldInfo, + metadataChildFieldValue, + newDatasetMetadataBlockName, + metadataFieldKey, + ); + } + } else { + const metadataChildFieldValue = newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue; + this.validateChildMetadataFieldValues( + childMetadataFieldKeys, + metadataFieldInfo, + metadataChildFieldValue, newDatasetMetadataBlockName, + metadataFieldKey, ); - } */ + } } } } } - private validateMetadataFieldValueType( + private validateChildMetadataFieldValues( + childMetadataFieldKeys: string[], + metadataFieldInfo: MetadataFieldInfo, + metadataChildFieldValue: Record, + newDatasetMetadataBlockName: string, + metadataParentFieldKey: string, + ) { + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + this.validateMetadataFieldValue( + childMetadataFieldInfo, + childMetadataFieldKey, + metadataChildFieldValue[childMetadataFieldKey], + newDatasetMetadataBlockName, + metadataParentFieldKey, + ); + } + } + + private validateMetadataFieldValue( metadataFieldInfo: MetadataFieldInfo, metadataFieldKey: string, newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, newDatasetMetadataBlockName: string, + metadataParentFieldKey?: string, ): void { - if (metadataFieldInfo.isRequired && newDatasetMetadataFieldValue == undefined) { - throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName); + if ( + 
newDatasetMetadataFieldValue == undefined || + newDatasetMetadataFieldValue == null || + (typeof newDatasetMetadataFieldValue == 'string' && newDatasetMetadataFieldValue.trim() === '') + ) { + if (metadataFieldInfo.isRequired) { + throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName, metadataParentFieldKey); + } else { + return; + } } if (metadataFieldInfo.multiple) { - if (!Array.isArray(newDatasetMetadataFieldValue)) { - throw this.createValidationError( - metadataFieldKey, - newDatasetMetadataBlockName, - undefined, - 'Expecting an array of values.', - ); - } - if (this.isValidArrayType(newDatasetMetadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { - throw this.createValidationError( - metadataFieldKey, - newDatasetMetadataBlockName, - undefined, - 'Expecting an array of sub fields, not strings.', - ); - } else if (this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { - throw this.createValidationError( - metadataFieldKey, - newDatasetMetadataBlockName, - undefined, - 'Expecting an array of strings, not sub fields.', - ); - } else if ( - !this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && - !this.isValidArrayType(newDatasetMetadataFieldValue, 'string') - ) { - throw this.createValidationError( - metadataFieldKey, - newDatasetMetadataBlockName, - undefined, - 'The provided array of values is not valid.', - ); - } + this.validateMultipleMetadataField( + newDatasetMetadataFieldValue, + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + metadataFieldInfo, + ); } else { - if (Array.isArray(newDatasetMetadataFieldValue)) { - throw this.createValidationError( - metadataFieldKey, - newDatasetMetadataBlockName, - undefined, - 'Expecting a single field, not an array.', - ); - } - if (typeof newDatasetMetadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { - throw this.createValidationError( - metadataFieldKey, - 
newDatasetMetadataBlockName, - undefined, - 'Expecting a string, not sub fields.', - ); - } + this.validateSingleMetadataField( + newDatasetMetadataFieldValue, + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + metadataFieldInfo, + ); + } + } + + private validateMultipleMetadataField( + newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + metadataFieldKey: string, + newDatasetMetadataBlockName: string, + metadataParentFieldKey: string, + metadataFieldInfo: MetadataFieldInfo, + ) { + if (!Array.isArray(newDatasetMetadataFieldValue)) { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting an array of values.', + ); + } + if (this.isValidArrayType(newDatasetMetadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting an array of child fields, not strings.', + ); + } else if (this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting an array of strings, not child fields.', + ); + } else if ( + !this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && + !this.isValidArrayType(newDatasetMetadataFieldValue, 'string') + ) { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'The provided array of values is not valid.', + ); + } + } + + private validateSingleMetadataField( + newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + metadataFieldKey: string, + newDatasetMetadataBlockName: string, + metadataParentFieldKey: string, + metadataFieldInfo: MetadataFieldInfo, + ) { + if (Array.isArray(newDatasetMetadataFieldValue)) { + throw this.createValidationError( + metadataFieldKey, + 
newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting a single field, not an array.', + ); + } + if (typeof newDatasetMetadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting a string, not child fields.', + ); + } + if (typeof newDatasetMetadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { + throw this.createValidationError( + metadataFieldKey, + newDatasetMetadataBlockName, + metadataParentFieldKey, + 'Expecting child fields, not a string.', + ); } } diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index b0c69bba..a0c48cd4 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -30,6 +30,7 @@ describe('execute', () => { newDataset: NewDataset, expectedMetadataFieldName: string, expectedErrorMessage: string, + expectedParentMetadataFieldName?: string, ): Promise { const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); await sut @@ -41,7 +42,7 @@ describe('execute', () => { const fieldValidationError = error as T; assert.match(fieldValidationError.citationBlockName, 'citation'); assert.match(fieldValidationError.metadataFieldName, expectedMetadataFieldName); - assert.match(fieldValidationError.parentMetadataFieldName, undefined); + assert.match(fieldValidationError.parentMetadataFieldName, expectedParentMetadataFieldName); assert.match(fieldValidationError.message, expectedErrorMessage); }); } @@ -73,14 +74,14 @@ describe('execute', () => { test('should raise an error when the provided field value is an object and the field expects a string', async () => { const invalidTitleFieldValue = { - invalidSubfield1: 'invalid value 1', - invalidSubfield2: 'invalid value 2', + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', }; const 
testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'title', - 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a string, not sub fields.', + 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a string, not child fields.', ); }); @@ -100,26 +101,45 @@ describe('execute', () => { await runValidateExpectingFieldValidationError( testNewDataset, 'author', - 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of sub fields, not strings', + 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of child fields, not strings', ); }); test('should raise an error when the provided field value is an array of objects and the field expects an array of strings', async () => { const invalidAlternativeTitleFieldValue = [ { - invalidSubfield1: 'invalid value 1', - invalidSubfield2: 'invalid value 2', + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', }, { - invalidSubfield1: 'invalid value 1', - invalidSubfield2: 'invalid value 2', + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', }, ]; const testNewDataset = createNewDatasetModel(undefined, undefined, invalidAlternativeTitleFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, 'alternativeTitle', - 'There was an error when validating the field alternativeTitle from metadata block citation. Reason was: Expecting an array of strings, not sub fields', + 'There was an error when validating the field alternativeTitle from metadata block citation. 
Reason was: Expecting an array of strings, not child fields', + ); + }); + + test('should raise an empty field error when a child field is missing', async () => { + const invalidAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ]; + const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'authorName', + 'There was an error when validating the field authorName from metadata block citation with parent field author. Reason was: The field should not be empty.', + 'author', ); }); }); From 822f384a610806655128a967acb9aaf50c022ce5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 12:53:56 +0000 Subject: [PATCH 14/96] Added: empty required array field error handling and refactor to NewDatasetValidator --- .../errors/ControlledVocabularyFieldError.ts | 18 ++ .../validators/errors/EmptyFieldError.ts | 15 +- .../validators/errors/FieldValidationError.ts | 13 +- .../validators/NewDatasetValidator.ts | 254 +++++++++++------- test/testHelpers/datasets/newDatasetHelper.ts | 6 +- .../unit/datasets/NewDatasetValidator.test.ts | 19 +- 6 files changed, 218 insertions(+), 107 deletions(-) create mode 100644 src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts diff --git a/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts b/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts new file mode 100644 index 00000000..77c0c33c --- /dev/null +++ b/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class ControlledVocabularyFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + 
fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field does not have a valid controlled vocabulary value.', + ); + } +} diff --git a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts index 52c3ae80..e1ca1d7a 100644 --- a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts +++ b/src/core/domain/useCases/validators/errors/EmptyFieldError.ts @@ -1,7 +1,18 @@ import { FieldValidationError } from './FieldValidationError'; export class EmptyFieldError extends FieldValidationError { - constructor(metadataFieldName: string, citationBlockName: string, parentMetadataFieldName?: string) { - super(metadataFieldName, citationBlockName, parentMetadataFieldName, 'The field should not be empty.'); + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field should not be empty.', + ); } } diff --git a/src/core/domain/useCases/validators/errors/FieldValidationError.ts b/src/core/domain/useCases/validators/errors/FieldValidationError.ts index ff1c830b..6efa5e6e 100644 --- a/src/core/domain/useCases/validators/errors/FieldValidationError.ts +++ b/src/core/domain/useCases/validators/errors/FieldValidationError.ts @@ -4,12 +4,22 @@ export class FieldValidationError extends ResourceValidationError { citationBlockName: string; metadataFieldName: string; parentMetadataFieldName?: string; + fieldPosition?: number; - constructor(metadataFieldName: string, citationBlockName: string, parentMetadataFieldName?: string, reason?: string) { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + reason?: string, + ) { let message = `There was an error when validating the 
field ${metadataFieldName} from metadata block ${citationBlockName}`; if (parentMetadataFieldName) { message += ` with parent field ${parentMetadataFieldName}`; } + if (fieldPosition) { + message += ` in position ${fieldPosition}`; + } if (reason) { message += `. Reason was: ${reason}`; } @@ -17,5 +27,6 @@ export class FieldValidationError extends ResourceValidationError { this.citationBlockName = citationBlockName; this.metadataFieldName = metadataFieldName; this.parentMetadataFieldName = parentMetadataFieldName; + this.fieldPosition = fieldPosition; } } diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 91ba42c4..96d3a9bc 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -5,6 +5,7 @@ import { MetadataFieldInfo } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../../core/domain/useCases/validators/errors/FieldValidationError'; +import { ControlledVocabularyFieldError } from '../../../../core/domain/useCases/validators/errors/ControlledVocabularyFieldError'; export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; @@ -15,195 +16,252 @@ export class NewDatasetValidator implements NewResourceValidator { async validate(resource: NewDataset): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { - const newDatasetMetadataBlockName = metadataBlockValues.name; + const metadataBlockName = metadataBlockValues.name; - const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(newDatasetMetadataBlockName); + const metadataBlock 
= await this.metadataBlockRepository.getMetadataBlockByName(metadataBlockName); for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { - const metadataFieldInfo: MetadataFieldInfo = metadataBlock.metadataFields[metadataFieldKey]; - const newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue = metadataBlockValues.fields[metadataFieldKey]; - - this.validateMetadataFieldValue( - metadataFieldInfo, + this.validateMetadataField( + metadataBlock.metadataFields[metadataFieldKey], metadataFieldKey, - newDatasetMetadataFieldValue, - newDatasetMetadataBlockName, + metadataBlockValues.fields[metadataFieldKey], + metadataBlockName, ); - - if (metadataFieldInfo.childMetadataFields != undefined) { - const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); - if (metadataFieldInfo.multiple) { - const newDatasetMetadataFieldChildFieldValues = - newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue[]; - for (const metadataChildFieldValue of newDatasetMetadataFieldChildFieldValues) { - this.validateChildMetadataFieldValues( - childMetadataFieldKeys, - metadataFieldInfo, - metadataChildFieldValue, - newDatasetMetadataBlockName, - metadataFieldKey, - ); - } - } else { - const metadataChildFieldValue = newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue; - this.validateChildMetadataFieldValues( - childMetadataFieldKeys, - metadataFieldInfo, - metadataChildFieldValue, - newDatasetMetadataBlockName, - metadataFieldKey, - ); - } - } } } } - private validateChildMetadataFieldValues( - childMetadataFieldKeys: string[], - metadataFieldInfo: MetadataFieldInfo, - metadataChildFieldValue: Record, - newDatasetMetadataBlockName: string, - metadataParentFieldKey: string, - ) { - for (const childMetadataFieldKey of childMetadataFieldKeys) { - const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - this.validateMetadataFieldValue( - childMetadataFieldInfo, - childMetadataFieldKey, - 
metadataChildFieldValue[childMetadataFieldKey], - newDatasetMetadataBlockName, - metadataParentFieldKey, - ); - } - } - - private validateMetadataFieldValue( + private validateMetadataField( metadataFieldInfo: MetadataFieldInfo, metadataFieldKey: string, - newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, - newDatasetMetadataBlockName: string, + metadataFieldValue: NewDatasetMetadataFieldValue, + metadataBlockName: string, metadataParentFieldKey?: string, + metadataFieldPosition?: number, ): void { if ( - newDatasetMetadataFieldValue == undefined || - newDatasetMetadataFieldValue == null || - (typeof newDatasetMetadataFieldValue == 'string' && newDatasetMetadataFieldValue.trim() === '') + metadataFieldValue == undefined || + metadataFieldValue == null || + (typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === '') || + (Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0) ) { if (metadataFieldInfo.isRequired) { - throw new EmptyFieldError(metadataFieldKey, newDatasetMetadataBlockName, metadataParentFieldKey); + throw new EmptyFieldError(metadataFieldKey, metadataBlockName, metadataParentFieldKey, metadataFieldPosition); } else { return; } } if (metadataFieldInfo.multiple) { this.validateMultipleMetadataField( - newDatasetMetadataFieldValue, + metadataFieldValue, metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, metadataFieldInfo, + metadataFieldPosition, ); } else { this.validateSingleMetadataField( - newDatasetMetadataFieldValue, + metadataFieldValue, metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, metadataFieldInfo, + metadataFieldPosition, ); } } private validateMultipleMetadataField( - newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + metadataFieldValue: NewDatasetMetadataFieldValue, metadataFieldKey: string, - newDatasetMetadataBlockName: string, + metadataBlockName: string, metadataParentFieldKey: string, 
metadataFieldInfo: MetadataFieldInfo, + metadataFieldPosition: number, ) { - if (!Array.isArray(newDatasetMetadataFieldValue)) { - throw this.createValidationError( + if (!Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting an array of values.', ); } - if (this.isValidArrayType(newDatasetMetadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { - throw this.createValidationError( + if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting an array of child fields, not strings.', ); - } else if (this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { - throw this.createValidationError( + } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting an array of strings, not child fields.', ); } else if ( - !this.isValidArrayType(newDatasetMetadataFieldValue, 'object') && - !this.isValidArrayType(newDatasetMetadataFieldValue, 'string') + !this.isValidArrayType(metadataFieldValue, 'object') && + !this.isValidArrayType(metadataFieldValue, 'string') ) { - throw this.createValidationError( + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'The provided array of values is not valid.', ); } + + const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValue[]; + fieldValues.forEach((value, metadataFieldPosition) => { + 
this.validateFieldValue( + metadataFieldInfo, + value, + metadataBlockName, + metadataFieldKey, + metadataParentFieldKey, + metadataFieldPosition, + ); + }); } private validateSingleMetadataField( - newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + metadataFieldValue: NewDatasetMetadataFieldValue, metadataFieldKey: string, - newDatasetMetadataBlockName: string, + metadataBlockName: string, metadataParentFieldKey: string, metadataFieldInfo: MetadataFieldInfo, + metadataFieldPosition: number, ) { - if (Array.isArray(newDatasetMetadataFieldValue)) { - throw this.createValidationError( + if (Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting a single field, not an array.', ); } - if (typeof newDatasetMetadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { - throw this.createValidationError( + if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting a string, not child fields.', ); } - if (typeof newDatasetMetadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { - throw this.createValidationError( + if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( metadataFieldKey, - newDatasetMetadataBlockName, + metadataBlockName, metadataParentFieldKey, + metadataFieldPosition, 'Expecting child fields, not a string.', ); } + this.validateFieldValue( + metadataFieldInfo, + metadataFieldValue, + metadataBlockName, + metadataFieldKey, + metadataParentFieldKey, + metadataFieldPosition, + ); + } + + private validateFieldValue( + metadataFieldInfo: MetadataFieldInfo, + value: NewDatasetMetadataFieldValue, + 
metadataBlockName: string, + metadataFieldKey: string, + metadataParentFieldKey: string, + metadataFieldPosition: number, + ) { + if (metadataFieldInfo.isControlledVocabulary) { + this.validateControlledVocabularyFieldValue( + metadataFieldInfo, + value as string, + metadataBlockName, + metadataFieldKey, + metadataParentFieldKey, + metadataFieldPosition, + ); + } else if (metadataFieldInfo.childMetadataFields != undefined) { + this.validateChildMetadataFieldValues( + metadataFieldInfo, + value as NewDatasetMetadataChildFieldValue, + metadataBlockName, + metadataFieldKey, + metadataFieldPosition, + ); + } + } + + private validateControlledVocabularyFieldValue( + metadataFieldInfo: MetadataFieldInfo, + controledVocabularyValue: string, + metadataBlockName: string, + metadataFieldKey: string, + metadataParentFieldKey?: string, + metadataFieldPosition?: number, + ) { + if (!metadataFieldInfo.controlledVocabularyValues.includes(controledVocabularyValue)) { + throw new ControlledVocabularyFieldError( + metadataFieldKey, + metadataBlockName, + metadataParentFieldKey, + metadataFieldPosition, + ); + } + } + + private validateChildMetadataFieldValues( + metadataFieldInfo: MetadataFieldInfo, + metadataChildFieldValue: NewDatasetMetadataChildFieldValue, + metadataBlockName: string, + metadataParentFieldKey: string, + metadataFieldPosition?: number, + ) { + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + this.validateMetadataField( + childMetadataFieldInfo, + childMetadataFieldKey, + metadataChildFieldValue[childMetadataFieldKey], + metadataBlockName, + metadataParentFieldKey, + metadataFieldPosition, + ); + } } private isValidArrayType( - newDatasetMetadataFieldValue: Array, + metadataFieldValue: Array, expectedType: 'string' | 'object', ): boolean { - return 
newDatasetMetadataFieldValue.every( - (item: string | NewDatasetMetadataFieldValue) => typeof item === expectedType, - ); + return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValue) => typeof item === expectedType); } - private createValidationError( + private createGeneralValidationError( metadataFieldKey: string, - newDatasetMetadataBlockName: string, + metadataBlockName: string, parentMetadataFieldName: string | undefined, + metadataFieldPosition: number | undefined, reason: string, ): FieldValidationError { - return new FieldValidationError(metadataFieldKey, newDatasetMetadataBlockName, parentMetadataFieldName, reason); + return new FieldValidationError( + metadataFieldKey, + metadataBlockName, + parentMetadataFieldName, + metadataFieldPosition, + reason, + ); } } diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index fc6298b5..2d91d344 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -2,9 +2,9 @@ import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/ import { MetadataBlock } from '../../../src'; export const createNewDatasetModel = ( - titleFieldValue?: NewDatasetMetadataFieldValue | string, - authorFieldValue?: NewDatasetMetadataFieldValue | string, - alternativeTitleValue?: NewDatasetMetadataFieldValue | string, + titleFieldValue?: NewDatasetMetadataFieldValue, + authorFieldValue?: NewDatasetMetadataFieldValue, + alternativeTitleValue?: NewDatasetMetadataFieldValue, ): NewDataset => { const validTitle = 'test dataset'; const validAuthorFieldValue = [ diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index a0c48cd4..c3efb7b1 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -9,7 +9,7 @@ import { fail } from 'assert'; import { IMetadataBlocksRepository } from 
'../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { EmptyFieldError } from '../../../src/core/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../src/core/domain/useCases/validators/errors/FieldValidationError'; -import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); @@ -31,6 +31,7 @@ describe('execute', () => { expectedMetadataFieldName: string, expectedErrorMessage: string, expectedParentMetadataFieldName?: string, + expectedPosition?: number, ): Promise { const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); await sut @@ -43,6 +44,7 @@ describe('execute', () => { assert.match(fieldValidationError.citationBlockName, 'citation'); assert.match(fieldValidationError.metadataFieldName, expectedMetadataFieldName); assert.match(fieldValidationError.parentMetadataFieldName, expectedParentMetadataFieldName); + assert.match(fieldValidationError.fieldPosition, expectedPosition); assert.match(fieldValidationError.message, expectedErrorMessage); }); } @@ -54,7 +56,7 @@ describe('execute', () => { await sut.validate(testNewDataset).catch((e) => fail(e)); }); - test('should raise an empty field error when a first level field is missing', async () => { + test('should raise an empty field error when a first level required string field is missing', async () => { await runValidateExpectingFieldValidationError( createNewDatasetModelWithoutFirstLevelRequiredField(), 'author', @@ -62,6 +64,16 @@ describe('execute', () => { ); }); + test('should raise an empty field error when a first level required array field is empty', async () => { + const invalidAuthorFieldValue : NewDatasetMetadataFieldValue = []; + const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, 
undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: The field should not be empty.', + ); + }); + test('should raise an error when the provided field value for an unique field is an array', async () => { const invalidTitleFieldValue = ['title1', 'title2']; const testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); @@ -138,8 +150,9 @@ describe('execute', () => { await runValidateExpectingFieldValidationError( testNewDataset, 'authorName', - 'There was an error when validating the field authorName from metadata block citation with parent field author. Reason was: The field should not be empty.', + 'There was an error when validating the field authorName from metadata block citation with parent field author in position 1. Reason was: The field should not be empty.', 'author', + 1, ); }); }); From 19c6400169aa5e765eea78e998da74e057c8ad38 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 12:55:53 +0000 Subject: [PATCH 15/96] Refactor: validation methods extracted in NewDatasetValidator --- .../useCases/validators/NewDatasetValidator.ts | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 96d3a9bc..390c7c17 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -41,8 +41,8 @@ export class NewDatasetValidator implements NewResourceValidator { if ( metadataFieldValue == undefined || metadataFieldValue == null || - (typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === '') || - (Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0) + this.isEmptyString(metadataFieldValue) || + 
this.isEmptyArray(metadataFieldValue) ) { if (metadataFieldInfo.isRequired) { throw new EmptyFieldError(metadataFieldKey, metadataBlockName, metadataParentFieldKey, metadataFieldPosition); @@ -242,6 +242,14 @@ export class NewDatasetValidator implements NewResourceValidator { } } + private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValue): boolean { + return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; + } + + private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValue): boolean { + return Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0; + } + private isValidArrayType( metadataFieldValue: Array, expectedType: 'string' | 'object', From 6d5028d974586b9979e6aae8936ac5404140156c Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 12:57:58 +0000 Subject: [PATCH 16/96] Refactor: validateMetadataBlock extracted in NewDatasetValidator --- .../validators/NewDatasetValidator.ts | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 390c7c17..684e5f23 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,4 +1,9 @@ -import { NewDataset, NewDatasetMetadataFieldValue, NewDatasetMetadataChildFieldValue } from '../../models/NewDataset'; +import { + NewDataset, + NewDatasetMetadataFieldValue, + NewDatasetMetadataChildFieldValue, + NewDatasetMetadataBlockValues, +} from '../../models/NewDataset'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { MetadataFieldInfo } from '../../../../metadataBlocks'; @@ -16,17 +21,20 @@ export class NewDatasetValidator 
implements NewResourceValidator { async validate(resource: NewDataset): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { - const metadataBlockName = metadataBlockValues.name; + await this.validateMetadataBlock(metadataBlockValues); + } + } - const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(metadataBlockName); - for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { - this.validateMetadataField( - metadataBlock.metadataFields[metadataFieldKey], - metadataFieldKey, - metadataBlockValues.fields[metadataFieldKey], - metadataBlockName, - ); - } + private async validateMetadataBlock(metadataBlockValues: NewDatasetMetadataBlockValues) { + const metadataBlockName = metadataBlockValues.name; + const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(metadataBlockName); + for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { + this.validateMetadataField( + metadataBlock.metadataFields[metadataFieldKey], + metadataFieldKey, + metadataBlockValues.fields[metadataFieldKey], + metadataBlockName, + ); } } From 705cd2c64f1d511e525d52bc4334b7542d235055 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 13:17:24 +0000 Subject: [PATCH 17/96] Added: date format validation to NewDatasetValidator and associated unit test --- .../validators/errors/DateFormatFieldError.ts | 18 ++++++++++ .../validators/NewDatasetValidator.ts | 33 ++++++++++++++++++- test/testHelpers/datasets/newDatasetHelper.ts | 30 +++++++++++++---- .../unit/datasets/NewDatasetValidator.test.ts | 32 +++++++++++++++--- 4 files changed, 102 insertions(+), 11 deletions(-) create mode 100644 src/core/domain/useCases/validators/errors/DateFormatFieldError.ts diff --git a/src/core/domain/useCases/validators/errors/DateFormatFieldError.ts b/src/core/domain/useCases/validators/errors/DateFormatFieldError.ts new file mode 100644 index 00000000..a6b36fa5 --- /dev/null +++ 
b/src/core/domain/useCases/validators/errors/DateFormatFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class DateFormatFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field requires a valid date format (YYYY-MM-DD).', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 684e5f23..502d3e4e 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -11,6 +11,7 @@ import { ResourceValidationError } from '../../../../core/domain/useCases/valida import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../../core/domain/useCases/validators/errors/FieldValidationError'; import { ControlledVocabularyFieldError } from '../../../../core/domain/useCases/validators/errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from '../../../../core/domain/useCases/validators/errors/DateFormatFieldError'; export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; @@ -200,7 +201,19 @@ export class NewDatasetValidator implements NewResourceValidator { metadataParentFieldKey, metadataFieldPosition, ); - } else if (metadataFieldInfo.childMetadataFields != undefined) { + } + + if (metadataFieldInfo.type == 'DATE') { + this.validateDateFieldValue( + value as string, + metadataBlockName, + metadataFieldKey, + metadataParentFieldKey, + metadataFieldPosition, + ); + } + + if (metadataFieldInfo.childMetadataFields != undefined) { 
this.validateChildMetadataFieldValues( metadataFieldInfo, value as NewDatasetMetadataChildFieldValue, @@ -229,6 +242,24 @@ export class NewDatasetValidator implements NewResourceValidator { } } + private validateDateFieldValue( + dateFieldValue: string, + metadataBlockName: string, + metadataFieldKey: string, + metadataParentFieldKey?: string, + metadataFieldPosition?: number, + ) { + const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; + if (!dateFormatRegex.test(dateFieldValue)) { + throw new DateFormatFieldError( + metadataFieldKey, + metadataBlockName, + metadataParentFieldKey, + metadataFieldPosition, + ); + } + } + private validateChildMetadataFieldValues( metadataFieldInfo: MetadataFieldInfo, metadataChildFieldValue: NewDatasetMetadataChildFieldValue, diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 2d91d344..19445326 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -4,7 +4,8 @@ import { MetadataBlock } from '../../../src'; export const createNewDatasetModel = ( titleFieldValue?: NewDatasetMetadataFieldValue, authorFieldValue?: NewDatasetMetadataFieldValue, - alternativeTitleValue?: NewDatasetMetadataFieldValue, + alternativeRequiredTitleValue?: NewDatasetMetadataFieldValue, + timePeriodCoveredStartValue?: NewDatasetMetadataFieldValue, ): NewDataset => { const validTitle = 'test dataset'; const validAuthorFieldValue = [ @@ -17,7 +18,7 @@ export const createNewDatasetModel = ( authorAffiliation: 'Dataverse.org', }, ]; - const validAlternativeTitleValue = ['alternative1', 'alternative2']; + const validAlternativeRequiredTitleValue = ['alternative1', 'alternative2']; return { metadataBlockValues: [ { @@ -25,7 +26,11 @@ export const createNewDatasetModel = ( fields: { title: titleFieldValue !== undefined ? titleFieldValue : validTitle, author: authorFieldValue !== undefined ? 
authorFieldValue : validAuthorFieldValue, - alternativeTitle: alternativeTitleValue !== undefined ? alternativeTitleValue : validAlternativeTitleValue, + alternativeRequiredTitle: + alternativeRequiredTitleValue !== undefined + ? alternativeRequiredTitleValue + : validAlternativeRequiredTitleValue, + ...(timePeriodCoveredStartValue && { timePeriodCoveredStart: timePeriodCoveredStartValue }), }, }, ], @@ -127,9 +132,9 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { }, }, }, - alternativeTitle: { - name: 'alternativeTitle', - displayName: 'Alternative Title', + alternativeRequiredTitle: { + name: 'alternativeRequiredTitle', + displayName: 'Alternative Required Title', title: 'Alternative Title', type: 'TEXT', watermark: '', @@ -140,6 +145,19 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { isRequired: true, displayOrder: 4, }, + timePeriodCoveredStart: { + name: 'timePeriodCoveredStart', + displayName: 'Time Period Start Date', + title: 'Start Date', + type: 'DATE', + watermark: 'YYYY-MM-DD', + description: 'The start date of the time period that the data refer to', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#NAME: #VALUE ', + isRequired: false, + displayOrder: 4, + }, }, }; }; diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index c3efb7b1..07dad8d2 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -65,7 +65,7 @@ describe('execute', () => { }); test('should raise an empty field error when a first level required array field is empty', async () => { - const invalidAuthorFieldValue : NewDatasetMetadataFieldValue = []; + const invalidAuthorFieldValue: NewDatasetMetadataFieldValue = []; const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, @@ -131,12 +131,12 @@ 
describe('execute', () => { const testNewDataset = createNewDatasetModel(undefined, undefined, invalidAlternativeTitleFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, - 'alternativeTitle', - 'There was an error when validating the field alternativeTitle from metadata block citation. Reason was: Expecting an array of strings, not child fields', + 'alternativeRequiredTitle', + 'There was an error when validating the field alternativeRequiredTitle from metadata block citation. Reason was: Expecting an array of strings, not child fields', ); }); - test('should raise an empty field error when a child field is missing', async () => { + test('should raise an empty field error when a required child field is missing', async () => { const invalidAuthorFieldValue = [ { authorName: 'Admin, Dataverse', @@ -155,4 +155,28 @@ describe('execute', () => { 1, ); }); + + test('should not raise an empty field error when a not required child field is missing', async () => { + const authorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'John, Doe', + }, + ]; + const testNewDataset = createNewDatasetModel(undefined, authorFieldValue, undefined); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + await sut.validate(testNewDataset).catch((e) => fail(e)); + }); + + test('should raise a date format validation error when a date field has an invalid format', async () => { + const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, '1-1-2020'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'timePeriodCoveredStart', + 'There was an error when validating the field timePeriodCoveredStart from metadata block citation. 
Reason was: The field requires a valid date format (YYYY-MM-DD).', + ); + }); }); From 335fcecbed42a7532091bc559c7913b685c216d0 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 13:35:40 +0000 Subject: [PATCH 18/96] Added: test cases for controlled vocabulary and date fields --- .../errors/ControlledVocabularyFieldError.ts | 2 +- test/testHelpers/datasets/newDatasetHelper.ts | 72 ++++++++++++++++++- .../unit/datasets/NewDatasetValidator.test.ts | 25 ++++++- 3 files changed, 96 insertions(+), 3 deletions(-) diff --git a/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts b/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts index 77c0c33c..b628f53f 100644 --- a/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts +++ b/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts @@ -12,7 +12,7 @@ export class ControlledVocabularyFieldError extends FieldValidationError { citationBlockName, parentMetadataFieldName, fieldPosition, - 'The field does have a valid controlled vocabulary value.', + 'The field does not have a valid controlled vocabulary value.', ); } } diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 19445326..f24fab2d 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -6,6 +6,7 @@ export const createNewDatasetModel = ( authorFieldValue?: NewDatasetMetadataFieldValue, alternativeRequiredTitleValue?: NewDatasetMetadataFieldValue, timePeriodCoveredStartValue?: NewDatasetMetadataFieldValue, + contributorTypeValue?: NewDatasetMetadataFieldValue, ): NewDataset => { const validTitle = 'test dataset'; const validAuthorFieldValue = [ @@ -31,6 +32,14 @@ export const createNewDatasetModel = ( ? 
alternativeRequiredTitleValue : validAlternativeRequiredTitleValue, ...(timePeriodCoveredStartValue && { timePeriodCoveredStart: timePeriodCoveredStartValue }), + ...(contributorTypeValue && { + contributor: [ + { + contributorName: 'Admin, Dataverse', + contributorType: contributorTypeValue as string, + }, + ], + }), }, }, ], @@ -156,7 +165,68 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { isControlledVocabulary: false, displayFormat: '#NAME: #VALUE ', isRequired: false, - displayOrder: 4, + displayOrder: 5, + }, + contributor: { + name: 'contributor', + displayName: 'Contributor', + title: 'Contributor', + type: 'NONE', + watermark: '', + description: + 'The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset', + multiple: true, + isControlledVocabulary: false, + displayFormat: ':', + isRequired: false, + displayOrder: 6, + childMetadataFields: { + contributorType: { + name: 'contributorType', + displayName: 'Contributor Type', + title: 'Type', + type: 'TEXT', + watermark: '', + description: 'Indicates the type of contribution made to the dataset', + multiple: false, + isControlledVocabulary: true, + displayFormat: '#VALUE ', + isRequired: false, + displayOrder: 7, + controlledVocabularyValues: [ + 'Data Collector', + 'Data Curator', + 'Data Manager', + 'Editor', + 'Funder', + 'Hosting Institution', + 'Project Leader', + 'Project Manager', + 'Project Member', + 'Related Person', + 'Researcher', + 'Research Group', + 'Rights Holder', + 'Sponsor', + 'Supervisor', + 'Work Package Leader', + 'Other', + ], + }, + contributorName: { + name: 'contributorName', + displayName: 'Contributor Name', + title: 'Name', + type: 'TEXT', + watermark: '1) FamilyName, GivenName or 2) Organization', + description: "The name of the contributor, e.g. 
the person's name or the name of an organization", + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 8, + }, + }, }, }, }; diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index 07dad8d2..cdd14ce9 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -49,7 +49,7 @@ describe('execute', () => { }); } - test('should not raise validation error when new dataset is valid', async () => { + test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { const testNewDataset = createNewDatasetModel(); const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); @@ -179,4 +179,27 @@ describe('execute', () => { 'There was an error when validating the field timePeriodCoveredStart from metadata block citation. Reason was: The field requires a valid date format (YYYY-MM-DD).', ); }); + + test('should not raise a date format validation error when a date field has a valid format', async () => { + const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, '2020-01-01'); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + await sut.validate(testNewDataset).catch((e) => fail(e)); + }); + + test('should raise a controlled vocabulary error when a controlled vocabulary field has an invalid format', async () => { + const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, undefined, 'Wrong Value'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'contributorType', + 'There was an error when validating the field contributorType from metadata block citation with parent field contributor. 
Reason was: The field does not have a valid controlled vocabulary value.', + 'contributor', + 0, + ); + }); + + test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { + const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, undefined, 'Project Member'); + const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + await sut.validate(testNewDataset).catch((e) => fail(e)); + }); }); From b577bc829370c31848ff4c1f33c9f2a289f05fbd Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 13:50:21 +0000 Subject: [PATCH 19/96] Refactor: dataset validator resource validation errors location --- .../domain/useCases/validators/NewDatasetValidator.ts | 8 ++++---- .../validators/errors/ControlledVocabularyFieldError.ts | 0 .../useCases/validators/errors/DateFormatFieldError.ts | 0 .../domain/useCases/validators/errors/EmptyFieldError.ts | 0 .../useCases/validators/errors/FieldValidationError.ts | 2 +- test/unit/datasets/NewDatasetValidator.test.ts | 4 ++-- 6 files changed, 7 insertions(+), 7 deletions(-) rename src/{core => datasets}/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts (100%) rename src/{core => datasets}/domain/useCases/validators/errors/DateFormatFieldError.ts (100%) rename src/{core => datasets}/domain/useCases/validators/errors/EmptyFieldError.ts (100%) rename src/{core => datasets}/domain/useCases/validators/errors/FieldValidationError.ts (88%) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 502d3e4e..61b1dfbc 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -8,10 +8,10 @@ import { NewResourceValidator } from '../../../../core/domain/useCases/validator import { IMetadataBlocksRepository } from 
'../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { MetadataFieldInfo } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; -import { EmptyFieldError } from '../../../../core/domain/useCases/validators/errors/EmptyFieldError'; -import { FieldValidationError } from '../../../../core/domain/useCases/validators/errors/FieldValidationError'; -import { ControlledVocabularyFieldError } from '../../../../core/domain/useCases/validators/errors/ControlledVocabularyFieldError'; -import { DateFormatFieldError } from '../../../../core/domain/useCases/validators/errors/DateFormatFieldError'; +import { EmptyFieldError } from './errors/EmptyFieldError'; +import { FieldValidationError } from './errors/FieldValidationError'; +import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from './errors/DateFormatFieldError'; export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; diff --git a/src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts b/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts similarity index 100% rename from src/core/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts rename to src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts diff --git a/src/core/domain/useCases/validators/errors/DateFormatFieldError.ts b/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts similarity index 100% rename from src/core/domain/useCases/validators/errors/DateFormatFieldError.ts rename to src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts diff --git a/src/core/domain/useCases/validators/errors/EmptyFieldError.ts b/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts 
similarity index 100% rename from src/core/domain/useCases/validators/errors/EmptyFieldError.ts rename to src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts diff --git a/src/core/domain/useCases/validators/errors/FieldValidationError.ts b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts similarity index 88% rename from src/core/domain/useCases/validators/errors/FieldValidationError.ts rename to src/datasets/domain/useCases/validators/errors/FieldValidationError.ts index 6efa5e6e..2c3c6a2e 100644 --- a/src/core/domain/useCases/validators/errors/FieldValidationError.ts +++ b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts @@ -1,4 +1,4 @@ -import { ResourceValidationError } from './ResourceValidationError'; +import { ResourceValidationError } from '../../../../../core/domain/useCases/validators/errors/ResourceValidationError'; export class FieldValidationError extends ResourceValidationError { citationBlockName: string; diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index cdd14ce9..ba5713e9 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -7,8 +7,8 @@ import { } from '../../testHelpers/datasets/newDatasetHelper'; import { fail } from 'assert'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; -import { EmptyFieldError } from '../../../src/core/domain/useCases/validators/errors/EmptyFieldError'; -import { FieldValidationError } from '../../../src/core/domain/useCases/validators/errors/FieldValidationError'; +import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; +import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; import { NewDataset, NewDatasetMetadataFieldValue } from 
'../../../src/datasets/domain/models/NewDataset'; describe('execute', () => { From 9fda8b5167712f902e81363b050e64ecf2aaee71 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 16 Jan 2024 13:52:32 +0000 Subject: [PATCH 20/96] Added: createDataset use case export --- src/datasets/index.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 07a1574a..64fd5b63 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -7,6 +7,9 @@ import { GetPrivateUrlDatasetCitation } from './domain/useCases/GetPrivateUrlDat import { GetDatasetUserPermissions } from './domain/useCases/GetDatasetUserPermissions'; import { GetDatasetLocks } from './domain/useCases/GetDatasetLocks'; import { GetAllDatasetPreviews } from './domain/useCases/GetAllDatasetPreviews'; +import { NewDatasetValidator } from './domain/useCases/validators/NewDatasetValidator'; +import { MetadataBlocksRepository } from '../metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { CreateDataset } from './domain/useCases/CreateDataset'; const datasetsRepository = new DatasetsRepository(); @@ -19,6 +22,9 @@ const getDatasetUserPermissions = new GetDatasetUserPermissions(datasetsReposito const getDatasetLocks = new GetDatasetLocks(datasetsRepository); const getAllDatasetPreviews = new GetAllDatasetPreviews(datasetsRepository); +const newDatasetValidator = new NewDatasetValidator(new MetadataBlocksRepository()); +const createDataset = new CreateDataset(datasetsRepository, newDatasetValidator); + export { getDatasetSummaryFieldNames, getDataset, @@ -28,6 +34,7 @@ export { getDatasetUserPermissions, getDatasetLocks, getAllDatasetPreviews, + createDataset, }; export { DatasetNotNumberedVersion } from './domain/models/DatasetNotNumberedVersion'; export { DatasetUserPermissions } from './domain/models/DatasetUserPermissions'; From b36c23c7c1118e408d1d12a7aa4d8bacc53f43e6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 17 Jan 2024 10:50:56 +0000 
Subject: [PATCH 21/96] Refactor: using NewDatasetMetadataFieldAndValueInfo to group multiple params in NewDatasetValidator --- .../validators/NewDatasetValidator.ts | 256 ++++++------------ 1 file changed, 86 insertions(+), 170 deletions(-) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 61b1dfbc..88660e95 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -13,6 +13,15 @@ import { FieldValidationError } from './errors/FieldValidationError'; import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; import { DateFormatFieldError } from './errors/DateFormatFieldError'; +export interface NewDatasetMetadataFieldAndValueInfo { + metadataFieldInfo: MetadataFieldInfo; + metadataFieldKey: string; + metadataFieldValue: NewDatasetMetadataFieldValue; + metadataBlockName: string; + metadataParentFieldKey?: string; + metadataFieldPosition?: number; +} + export class NewDatasetValidator implements NewResourceValidator { private metadataBlockRepository: IMetadataBlocksRepository; @@ -30,23 +39,18 @@ export class NewDatasetValidator implements NewResourceValidator { const metadataBlockName = metadataBlockValues.name; const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(metadataBlockName); for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { - this.validateMetadataField( - metadataBlock.metadataFields[metadataFieldKey], - metadataFieldKey, - metadataBlockValues.fields[metadataFieldKey], - metadataBlockName, - ); + this.validateMetadataField({ + metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], + metadataFieldKey: metadataFieldKey, + metadataFieldValue: metadataBlockValues.fields[metadataFieldKey], + metadataBlockName: metadataBlockName, + }); } } - private validateMetadataField( - 
metadataFieldInfo: MetadataFieldInfo, - metadataFieldKey: string, - metadataFieldValue: NewDatasetMetadataFieldValue, - metadataBlockName: string, - metadataParentFieldKey?: string, - metadataFieldPosition?: number, - ): void { + private validateMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; if ( metadataFieldValue == undefined || metadataFieldValue == null || @@ -54,63 +58,37 @@ export class NewDatasetValidator implements NewResourceValidator { this.isEmptyArray(metadataFieldValue) ) { if (metadataFieldInfo.isRequired) { - throw new EmptyFieldError(metadataFieldKey, metadataBlockName, metadataParentFieldKey, metadataFieldPosition); + throw new EmptyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); } else { return; } } if (metadataFieldInfo.multiple) { - this.validateMultipleMetadataField( - metadataFieldValue, - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldInfo, - metadataFieldPosition, - ); + this.validateMultipleMetadataField(newDatasetMetadataFieldAndValueInfo); } else { - this.validateSingleMetadataField( - metadataFieldValue, - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldInfo, - metadataFieldPosition, - ); + this.validateSingleMetadataField(newDatasetMetadataFieldAndValueInfo); } } - private validateMultipleMetadataField( - metadataFieldValue: NewDatasetMetadataFieldValue, - metadataFieldKey: string, - metadataBlockName: string, - metadataParentFieldKey: string, - metadataFieldInfo: MetadataFieldInfo, - metadataFieldPosition: number, - ) { + private 
validateMultipleMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; if (!Array.isArray(metadataFieldValue)) { - throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, - 'Expecting an array of values.', - ); + throw this.createGeneralValidationError(newDatasetMetadataFieldAndValueInfo, 'Expecting an array of values.'); } if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'Expecting an array of child fields, not strings.', ); } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'Expecting an array of strings, not child fields.', ); } else if ( @@ -118,166 +96,107 @@ export class NewDatasetValidator implements NewResourceValidator { !this.isValidArrayType(metadataFieldValue, 'string') ) { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'The provided array of values is not valid.', ); } const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValue[]; fieldValues.forEach((value, metadataFieldPosition) => { - this.validateFieldValue( - metadataFieldInfo, - value, - metadataBlockName, - metadataFieldKey, - metadataParentFieldKey, - metadataFieldPosition, - ); + this.validateFieldValue({ + metadataFieldInfo: metadataFieldInfo, + 
metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldValue: value, + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: metadataFieldPosition, + }); }); } - private validateSingleMetadataField( - metadataFieldValue: NewDatasetMetadataFieldValue, - metadataFieldKey: string, - metadataBlockName: string, - metadataParentFieldKey: string, - metadataFieldInfo: MetadataFieldInfo, - metadataFieldPosition: number, - ) { + private validateSingleMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; if (Array.isArray(metadataFieldValue)) { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'Expecting a single field, not an array.', ); } if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'Expecting a string, not child fields.', ); } if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { throw this.createGeneralValidationError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo, 'Expecting child fields, not a string.', ); } - this.validateFieldValue( - metadataFieldInfo, - metadataFieldValue, - metadataBlockName, - metadataFieldKey, - metadataParentFieldKey, - metadataFieldPosition, - ); + this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); } - private validateFieldValue( - 
metadataFieldInfo: MetadataFieldInfo, - value: NewDatasetMetadataFieldValue, - metadataBlockName: string, - metadataFieldKey: string, - metadataParentFieldKey: string, - metadataFieldPosition: number, - ) { + private validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; if (metadataFieldInfo.isControlledVocabulary) { - this.validateControlledVocabularyFieldValue( - metadataFieldInfo, - value as string, - metadataBlockName, - metadataFieldKey, - metadataParentFieldKey, - metadataFieldPosition, - ); + this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); } if (metadataFieldInfo.type == 'DATE') { - this.validateDateFieldValue( - value as string, - metadataBlockName, - metadataFieldKey, - metadataParentFieldKey, - metadataFieldPosition, - ); + this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); } if (metadataFieldInfo.childMetadataFields != undefined) { - this.validateChildMetadataFieldValues( - metadataFieldInfo, - value as NewDatasetMetadataChildFieldValue, - metadataBlockName, - metadataFieldKey, - metadataFieldPosition, - ); + this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); } } private validateControlledVocabularyFieldValue( - metadataFieldInfo: MetadataFieldInfo, - controledVocabularyValue: string, - metadataBlockName: string, - metadataFieldKey: string, - metadataParentFieldKey?: string, - metadataFieldPosition?: number, + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, ) { - if (!metadataFieldInfo.controlledVocabularyValues.includes(controledVocabularyValue)) { + if ( + !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, + ) + ) { throw new ControlledVocabularyFieldError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - 
metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, ); } } - private validateDateFieldValue( - dateFieldValue: string, - metadataBlockName: string, - metadataFieldKey: string, - metadataParentFieldKey?: string, - metadataFieldPosition?: number, - ) { + private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; - if (!dateFormatRegex.test(dateFieldValue)) { + if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { throw new DateFormatFieldError( - metadataFieldKey, - metadataBlockName, - metadataParentFieldKey, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, ); } } - private validateChildMetadataFieldValues( - metadataFieldInfo: MetadataFieldInfo, - metadataChildFieldValue: NewDatasetMetadataChildFieldValue, - metadataBlockName: string, - metadataParentFieldKey: string, - metadataFieldPosition?: number, - ) { + private validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); for (const childMetadataFieldKey of childMetadataFieldKeys) { const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - this.validateMetadataField( - childMetadataFieldInfo, - childMetadataFieldKey, - metadataChildFieldValue[childMetadataFieldKey], - metadataBlockName, - metadataParentFieldKey, - 
metadataFieldPosition, - ); + this.validateMetadataField({ + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValue + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); } } @@ -297,17 +216,14 @@ export class NewDatasetValidator implements NewResourceValidator { } private createGeneralValidationError( - metadataFieldKey: string, - metadataBlockName: string, - parentMetadataFieldName: string | undefined, - metadataFieldPosition: number | undefined, + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, reason: string, ): FieldValidationError { return new FieldValidationError( - metadataFieldKey, - metadataBlockName, - parentMetadataFieldName, - metadataFieldPosition, + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, reason, ); } From c9bdf84ba1afda2d09a9cf106757e18d65b3d3a6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 17 Jan 2024 15:18:33 +0000 Subject: [PATCH 22/96] Added: typeClass property to MetadataFieldInfo --- src/metadataBlocks/domain/models/MetadataBlock.ts | 1 + .../transformers/metadataBlockTransformers.ts | 6 +++--- test/testHelpers/datasets/newDatasetHelper.ts | 9 +++++++++ test/testHelpers/metadataBlocks/metadataBlockHelper.ts | 8 ++++++++ 4 files changed, 21 insertions(+), 3 deletions(-) diff --git a/src/metadataBlocks/domain/models/MetadataBlock.ts b/src/metadataBlocks/domain/models/MetadataBlock.ts index dc60b5b1..834d7908 100644 --- 
a/src/metadataBlocks/domain/models/MetadataBlock.ts +++ b/src/metadataBlocks/domain/models/MetadataBlock.ts @@ -10,6 +10,7 @@ export interface MetadataFieldInfo { displayName: string; title: string; type: string; + typeClass: string; watermark: string; description: string; multiple: boolean; diff --git a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts index 35685156..5f29b60c 100644 --- a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts +++ b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts @@ -31,9 +31,9 @@ const transformPayloadMetadataFieldInfo = ( multiple: metadataFieldInfoPayload.multiple, isControlledVocabulary: metadataFieldInfoPayload.isControlledVocabulary, displayFormat: metadataFieldInfoPayload.displayFormat, - // TODO - isRequired: true, - displayOrder: 0, + isRequired: metadataFieldInfoPayload.isRequired, + displayOrder: metadataFieldInfoPayload.displayOrder, + typeClass: metadataFieldInfoPayload.typeClass, }; if (!isChild && metadataFieldInfoPayload.hasOwnProperty('childFields')) { const childMetadataFieldsPayload = metadataFieldInfoPayload.childFields; diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index f24fab2d..642e4752 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -99,6 +99,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, author: { name: 'author', @@ -112,6 +113,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 1, + typeClass: 'compound', childMetadataFields: { authorName: { name: 'authorName', @@ -125,6 +127,7 @@ export const 
createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 2, + typeClass: 'primitive', }, authorAffiliation: { name: 'authorAffiliation', @@ -138,6 +141,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: false, displayOrder: 3, + typeClass: 'primitive', }, }, }, @@ -153,6 +157,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '', isRequired: true, displayOrder: 4, + typeClass: 'primitive', }, timePeriodCoveredStart: { name: 'timePeriodCoveredStart', @@ -166,6 +171,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#NAME: #VALUE ', isRequired: false, displayOrder: 5, + typeClass: 'primitive', }, contributor: { name: 'contributor', @@ -180,6 +186,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: ':', isRequired: false, displayOrder: 6, + typeClass: 'compound', childMetadataFields: { contributorType: { name: 'contributorType', @@ -212,6 +219,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { 'Work Package Leader', 'Other', ], + typeClass: 'controlledVocabulary', }, contributorName: { name: 'contributorName', @@ -225,6 +233,7 @@ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 8, + typeClass: 'primitive', }, }, }, diff --git a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts index 4ff63661..28b013be 100644 --- a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts +++ b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts @@ -18,6 +18,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -31,6 +32,7 @@ export 
const createMetadataBlockModel = (): MetadataBlock => { displayFormat: '', isRequired: true, displayOrder: 0, + typeClass: 'compound', childMetadataFields: { testField3: { name: 'testName3', @@ -44,6 +46,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -57,6 +60,7 @@ export const createMetadataBlockModel = (): MetadataBlock => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, }, }, @@ -82,6 +86,7 @@ export const createMetadataBlockPayload = (): any => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -95,6 +100,7 @@ export const createMetadataBlockPayload = (): any => { displayFormat: '', isRequired: true, displayOrder: 0, + typeClass: 'compound', childFields: { testField3: { name: 'testName3', @@ -108,6 +114,7 @@ export const createMetadataBlockPayload = (): any => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -121,6 +128,7 @@ export const createMetadataBlockPayload = (): any => { displayFormat: '#VALUE', isRequired: true, displayOrder: 0, + typeClass: 'primitive', }, }, }, From 76775e464be22308f25768c1e3a091c45ef0c485 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jan 2024 13:24:54 +0000 Subject: [PATCH 23/96] Added: newDatasetTransformers WIP and calculating metadatablocks before calling validation and repository method --- .../validators/NewResourceValidator.ts | 5 +- src/datasets/domain/models/NewDataset.ts | 3 + .../repositories/IDatasetsRepository.ts | 3 +- src/datasets/domain/useCases/CreateDataset.ts | 31 +++- .../validators/NewDatasetValidator.ts | 24 ++-- src/datasets/index.ts | 4 +- .../infra/repositories/DatasetsRepository.ts | 5 +- .../transformers/newDatasetTransformers.ts | 135 ++++++++++++++++++ 
test/unit/datasets/CreateDataset.test.ts | 34 +++-- .../unit/datasets/NewDatasetValidator.test.ts | 30 ++-- 10 files changed, 216 insertions(+), 58 deletions(-) create mode 100644 src/datasets/infra/repositories/transformers/newDatasetTransformers.ts diff --git a/src/core/domain/useCases/validators/NewResourceValidator.ts b/src/core/domain/useCases/validators/NewResourceValidator.ts index a816bd07..2abb70ae 100644 --- a/src/core/domain/useCases/validators/NewResourceValidator.ts +++ b/src/core/domain/useCases/validators/NewResourceValidator.ts @@ -1,5 +1,6 @@ import { ResourceValidationError } from './errors/ResourceValidationError'; -export interface NewResourceValidator { - validate(resource: T): Promise; +export interface NewResourceValidator { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + validate(...args: any[]): Promise; } diff --git a/src/datasets/domain/models/NewDataset.ts b/src/datasets/domain/models/NewDataset.ts index eaa0bd73..2795c192 100644 --- a/src/datasets/domain/models/NewDataset.ts +++ b/src/datasets/domain/models/NewDataset.ts @@ -1,4 +1,7 @@ +import { DatasetLicense } from './Dataset'; + export interface NewDataset { + license?: DatasetLicense; metadataBlockValues: NewDatasetMetadataBlockValues[]; } diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 698fc261..06ff92a9 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -3,6 +3,7 @@ import { DatasetUserPermissions } from '../models/DatasetUserPermissions'; import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; import { NewDataset } from '../models/NewDataset'; +import { MetadataBlock } from '../../../metadataBlocks'; export interface IDatasetsRepository { getDatasetSummaryFieldNames(): Promise; @@ -13,5 +14,5 @@ export interface 
IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; - createDataset(newDataset: NewDataset): Promise; + createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[]): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index 77b188af..1d5905d4 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -1,20 +1,41 @@ import { UseCase } from '../../../core/domain/useCases/UseCase'; import { IDatasetsRepository } from '../repositories/IDatasetsRepository'; -import { NewDataset } from '../models/NewDataset'; +import { NewDataset, NewDatasetMetadataBlockValues } from '../models/NewDataset'; import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; +import { IMetadataBlocksRepository } from '../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; +import { MetadataBlock } from '../../../metadataBlocks'; export class CreateDataset implements UseCase { private datasetsRepository: IDatasetsRepository; - private newDatasetValidator: NewResourceValidator; + private metadataBlocksRepository: IMetadataBlocksRepository; + private newDatasetValidator: NewResourceValidator; - constructor(datasetsRepository: IDatasetsRepository, newDatasetValidator: NewResourceValidator) { + constructor( + datasetsRepository: IDatasetsRepository, + metadataBlocksRepository: IMetadataBlocksRepository, + newDatasetValidator: NewResourceValidator, + ) { this.datasetsRepository = datasetsRepository; + this.metadataBlocksRepository = metadataBlocksRepository; this.newDatasetValidator = newDatasetValidator; } async execute(newDataset: NewDataset): Promise { - return await this.newDatasetValidator.validate(newDataset).then(async () => { - return await 
this.datasetsRepository.createDataset(newDataset); + const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); + return await this.newDatasetValidator.validate(newDataset, metadataBlocks).then(async () => { + return await this.datasetsRepository.createDataset(newDataset, metadataBlocks); }); } + + async getNewDatasetMetadataBlocks(newDataset: NewDataset): Promise { + let metadataBlocks: MetadataBlock[] = []; + for (const metadataBlockValue in newDataset.metadataBlockValues) { + metadataBlocks.push( + await this.metadataBlocksRepository.getMetadataBlockByName( + (metadataBlockValue as unknown as NewDatasetMetadataBlockValues).name, + ), + ); + } + return metadataBlocks; + } } diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 88660e95..712fb1f2 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -5,8 +5,7 @@ import { NewDatasetMetadataBlockValues, } from '../../models/NewDataset'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; -import { IMetadataBlocksRepository } from '../../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; -import { MetadataFieldInfo } from '../../../../metadataBlocks'; +import { MetadataFieldInfo, MetadataBlock } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; import { EmptyFieldError } from './errors/EmptyFieldError'; import { FieldValidationError } from './errors/FieldValidationError'; @@ -22,22 +21,21 @@ export interface NewDatasetMetadataFieldAndValueInfo { metadataFieldPosition?: number; } -export class NewDatasetValidator implements NewResourceValidator { - private metadataBlockRepository: IMetadataBlocksRepository; - - constructor(metadataBlockRepository: 
IMetadataBlocksRepository) { - this.metadataBlockRepository = metadataBlockRepository; - } - - async validate(resource: NewDataset): Promise { +export class NewDatasetValidator implements NewResourceValidator { + async validate(resource: NewDataset, metadataBlocks: MetadataBlock[]): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { - await this.validateMetadataBlock(metadataBlockValues); + await this.validateMetadataBlock(metadataBlockValues, metadataBlocks); } } - private async validateMetadataBlock(metadataBlockValues: NewDatasetMetadataBlockValues) { + private async validateMetadataBlock( + metadataBlockValues: NewDatasetMetadataBlockValues, + metadataBlocks: MetadataBlock[], + ) { const metadataBlockName = metadataBlockValues.name; - const metadataBlock = await this.metadataBlockRepository.getMetadataBlockByName(metadataBlockName); + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name === metadataBlockName, + ); for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { this.validateMetadataField({ metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 64fd5b63..4e67636a 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -21,9 +21,7 @@ const getPrivateUrlDatasetCitation = new GetPrivateUrlDatasetCitation(datasetsRe const getDatasetUserPermissions = new GetDatasetUserPermissions(datasetsRepository); const getDatasetLocks = new GetDatasetLocks(datasetsRepository); const getAllDatasetPreviews = new GetAllDatasetPreviews(datasetsRepository); - -const newDatasetValidator = new NewDatasetValidator(new MetadataBlocksRepository()); -const createDataset = new CreateDataset(datasetsRepository, newDatasetValidator); +const createDataset = new CreateDataset(datasetsRepository, new MetadataBlocksRepository(), new NewDatasetValidator()); export { getDatasetSummaryFieldNames, diff --git 
a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index aae73f21..537c1df3 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -9,6 +9,7 @@ import { transformDatasetLocksResponseToDatasetLocks } from './transformers/data import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transformers/datasetPreviewsTransformers'; import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; import { NewDataset } from '../../domain/models/NewDataset'; +import { MetadataBlock } from '../../../metadataBlocks'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -108,7 +109,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi }); } - public async createDataset(newDataset: NewDataset): Promise { - console.log(newDataset); + public async createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[]): Promise { + console.log(newDataset + ' ' + datasetMetadataBlocks.length); } } diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts new file mode 100644 index 00000000..8713ddc0 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -0,0 +1,135 @@ +import { + NewDataset, + NewDatasetMetadataBlockValues, + NewDatasetMetadataFields, + NewDatasetMetadataFieldValue, + NewDatasetMetadataChildFieldValue, +} from '../../../domain/models/NewDataset'; +import { DatasetLicense } from '../../../domain/models/Dataset'; +import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetRequestPayload { + license?: DatasetLicense; + metadataBlocks: Record; +} + +export interface MetadataBlockRequestPayload { + fields: MetadataFieldRequestPayload[]; + displayName: string; +} + 
+export interface MetadataFieldRequestPayload { + value: MetadataFieldValueRequestPayload; + typeClass: string; + multiple: boolean; + typeName: string; +} + +export type MetadataFieldValueRequestPayload = + | string + | string[] + | Record + | Record[]; + +export const transformNewDatasetModelToRequestPayload = ( + newDataset: NewDataset, + metadataBlocks: MetadataBlock[], +): NewDatasetRequestPayload => { + return { + license: newDataset.license, + metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), + }; +}; + +export const transformMetadataBlockModelsToRequestPayload = ( + metadataBlockValuesModels: NewDatasetMetadataBlockValues[], + metadataBlocks: MetadataBlock[], +): Record => { + let metadataBlocksRequestPayload: Record = {}; + for (const item in metadataBlockValuesModels) { + const metadataBlockValuesModel: NewDatasetMetadataBlockValues = item as unknown as NewDatasetMetadataBlockValues; + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name === (item as unknown as NewDatasetMetadataBlockValues).name, + ); + metadataBlocksRequestPayload[metadataBlockValuesModel.name] = { + displayName: metadataBlock.displayName, + fields: transformMetadataFieldModelsToRequestPayload( + metadataBlockValuesModel.fields, + metadataBlock.metadataFields, + ), + }; + } + return metadataBlocksRequestPayload; +}; + +export const transformMetadataFieldModelsToRequestPayload = ( + metadataFieldsModel: NewDatasetMetadataFields, + metadataFields: Record, +): MetadataFieldRequestPayload[] => { + let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; + for (const metadataFieldKey of Object.keys(metadataFieldsModel)) { + const metadataFieldValue: NewDatasetMetadataFieldValue = metadataFieldsModel[metadataFieldKey]; + metadataFieldsRequestPayload.push( + transformMetadataFieldValueToRequestPayload( + metadataFieldValue, + metadataFieldKey, + 
metadataFields[metadataFieldKey], + ), + ); + } + return metadataFieldsRequestPayload; +}; + +export const transformMetadataFieldValueToRequestPayload = ( + metadataFieldValue: NewDatasetMetadataFieldValue, + metadataFieldKey: string, + metadataFieldInfo: MetadataFieldInfo, +): MetadataFieldRequestPayload => { + let value: MetadataFieldValueRequestPayload; + if (Array.isArray(metadataFieldValue)) { + if (metadataFieldValue.every((item: unknown) => typeof item === 'string')) { + value = metadataFieldValue as string[]; + } else { + let value: Record[] = []; + for (const item in metadataFieldValue as NewDatasetMetadataChildFieldValue[]) { + value.push( + transformMetadataChildFieldValueToRequestPayload( + item as unknown as NewDatasetMetadataChildFieldValue, + metadataFieldInfo, + ), + ); + } + } + } else if (typeof metadataFieldValue == 'string') { + value = metadataFieldValue; + } else { + value = transformMetadataChildFieldValueToRequestPayload( + metadataFieldValue as unknown as NewDatasetMetadataChildFieldValue, + metadataFieldInfo, + ); + } + return { + value: value, + typeClass: metadataFieldInfo.typeClass, + multiple: metadataFieldInfo.multiple, + typeName: metadataFieldKey, + }; +}; + +export const transformMetadataChildFieldValueToRequestPayload = ( + metadataFieldValue: NewDatasetMetadataChildFieldValue, + metadataFieldInfo: MetadataFieldInfo, +): Record => { + let metadataChildFieldRequestPayload: Record = {}; + for (const metadataChildFieldKey of Object.keys(metadataFieldValue)) { + const childMetadataFieldInfo: MetadataFieldInfo = metadataFieldInfo.childMetadataFields[metadataChildFieldKey]; + const value: string = metadataFieldValue[metadataChildFieldKey] as unknown as string; + metadataChildFieldRequestPayload[metadataChildFieldKey] = { + value: value, + typeClass: childMetadataFieldInfo.typeClass, + multiple: childMetadataFieldInfo.multiple, + typeName: metadataChildFieldKey, + }; + } + return metadataChildFieldRequestPayload; +}; diff --git 
a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index 78897bd6..260e68f1 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -2,34 +2,42 @@ import { CreateDataset } from '../../../src/datasets/domain/useCases/CreateDatas import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository'; import { assert, createSandbox, SinonSandbox } from 'sinon'; import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; -import { createNewDatasetModel } from '../../testHelpers/datasets/newDatasetHelper'; -import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { createNewDatasetModel, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; import { ResourceValidationError } from '../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; import { WriteError } from '../../../src'; +import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); const testDataset = createNewDatasetModel(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; afterEach(() => { sandbox.restore(); }); + function setupMetadataBlocksRepositoryStub(): IMetadataBlocksRepository { + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + return metadataBlocksRepositoryStub; + } + test('should call repository when validation is successful', async () => { const datasetsRepositoryStub = {}; const createDatasetStub = sandbox.stub(); datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorStub = >{}; + const newDatasetValidatorStub = {}; 
const validateStub = sandbox.stub().resolves(); newDatasetValidatorStub.validate = validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); + const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); await sut.execute(testDataset); - assert.calledWithExactly(validateStub, testDataset); - assert.calledWithExactly(createDatasetStub, testDataset); + assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks); assert.callOrder(validateStub, createDatasetStub); }); @@ -39,17 +47,17 @@ describe('execute', () => { const createDatasetStub = sandbox.stub(); datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorStub = >{}; + const newDatasetValidatorStub = {}; const testValidationError = new ResourceValidationError('Test error'); const validateStub = sandbox.stub().throwsException(testValidationError); newDatasetValidatorStub.validate = validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); + const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); let actualError: ResourceValidationError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testValidationError); - assert.calledWithExactly(validateStub, testDataset); + assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); assert.notCalled(createDatasetStub); }); @@ -59,17 +67,17 @@ describe('execute', () => { const createDatasetStub = sandbox.stub().throwsException(testWriteError); datasetsRepositoryStub.createDataset = createDatasetStub; - const newDatasetValidatorStub = >{}; + const newDatasetValidatorStub = {}; const validateMock = sandbox.stub().resolves(); newDatasetValidatorStub.validate = validateMock; - const sut = new 
CreateDataset(datasetsRepositoryStub, newDatasetValidatorStub); + const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); let actualError: ResourceValidationError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testWriteError); - assert.calledWithExactly(validateMock, testDataset); - assert.calledWithExactly(createDatasetStub, testDataset); + assert.calledWithExactly(validateMock, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks); assert.callOrder(validateMock, createDatasetStub); }); diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index ba5713e9..daf77e27 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -6,26 +6,18 @@ import { createNewDatasetModelWithoutFirstLevelRequiredField, } from '../../testHelpers/datasets/newDatasetHelper'; import { fail } from 'assert'; -import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; afterEach(() => { sandbox.restore(); }); - function setupMetadataBlocksRepositoryStub(): IMetadataBlocksRepository { - const testMetadataBlock = createNewDatasetMetadataBlockModel(); - const metadataBlocksRepositoryStub = {}; - const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlock); - 
metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; - return metadataBlocksRepositoryStub; - } - async function runValidateExpectingFieldValidationError( newDataset: NewDataset, expectedMetadataFieldName: string, @@ -33,9 +25,9 @@ describe('execute', () => { expectedParentMetadataFieldName?: string, expectedPosition?: number, ): Promise { - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + const sut = new NewDatasetValidator(); await sut - .validate(newDataset) + .validate(newDataset, testMetadataBlocks) .then(() => { fail('Validation should fail'); }) @@ -51,9 +43,9 @@ describe('execute', () => { test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { const testNewDataset = createNewDatasetModel(); - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); + const sut = new NewDatasetValidator(); - await sut.validate(testNewDataset).catch((e) => fail(e)); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); test('should raise an empty field error when a first level required string field is missing', async () => { @@ -167,8 +159,8 @@ describe('execute', () => { }, ]; const testNewDataset = createNewDatasetModel(undefined, authorFieldValue, undefined); - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); - await sut.validate(testNewDataset).catch((e) => fail(e)); + const sut = new NewDatasetValidator(); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); test('should raise a date format validation error when a date field has an invalid format', async () => { @@ -182,8 +174,8 @@ describe('execute', () => { test('should not raise a date format validation error when a date field has a valid format', async () => { const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, '2020-01-01'); - const sut = new 
NewDatasetValidator(setupMetadataBlocksRepositoryStub()); - await sut.validate(testNewDataset).catch((e) => fail(e)); + const sut = new NewDatasetValidator(); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); test('should raise a controlled vocabulary error when a controlled vocabulary field has an invalid format', async () => { @@ -199,7 +191,7 @@ describe('execute', () => { test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, undefined, 'Project Member'); - const sut = new NewDatasetValidator(setupMetadataBlocksRepositoryStub()); - await sut.validate(testNewDataset).catch((e) => fail(e)); + const sut = new NewDatasetValidator(); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); }); From 1a1d08a9929126f28f1002dc6e070be08714e031 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jan 2024 13:29:21 +0000 Subject: [PATCH 24/96] Fixed: test name --- test/unit/datasets/NewDatasetValidator.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index daf77e27..c083ba06 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -10,7 +10,7 @@ import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validator import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; -describe('execute', () => { +describe('validate', () => { const sandbox: SinonSandbox = createSandbox(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; From 46222d920359c270747c86256a82cf3c06fe03b8 Mon Sep 17 00:00:00 2001 
From: GPortas Date: Thu, 18 Jan 2024 13:55:56 +0000 Subject: [PATCH 25/96] Fixed: newDatasetTransformers --- .../transformers/newDatasetTransformers.ts | 22 ++++++++----------- .../datasets/newDatasetTransformers.test.ts | 19 ++++++++++++++++ 2 files changed, 28 insertions(+), 13 deletions(-) create mode 100644 test/unit/datasets/newDatasetTransformers.test.ts diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts index 8713ddc0..5da33c3a 100644 --- a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -36,7 +36,7 @@ export const transformNewDatasetModelToRequestPayload = ( metadataBlocks: MetadataBlock[], ): NewDatasetRequestPayload => { return { - license: newDataset.license, + ...(newDataset.license && { license: newDataset.license }), metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), }; }; @@ -46,10 +46,9 @@ export const transformMetadataBlockModelsToRequestPayload = ( metadataBlocks: MetadataBlock[], ): Record => { let metadataBlocksRequestPayload: Record = {}; - for (const item in metadataBlockValuesModels) { - const metadataBlockValuesModel: NewDatasetMetadataBlockValues = item as unknown as NewDatasetMetadataBlockValues; + metadataBlockValuesModels.forEach(function (metadataBlockValuesModel: NewDatasetMetadataBlockValues) { const metadataBlock: MetadataBlock = metadataBlocks.find( - (metadataBlock) => metadataBlock.name === (item as unknown as NewDatasetMetadataBlockValues).name, + (metadataBlock) => metadataBlock.name == metadataBlockValuesModel.name, ); metadataBlocksRequestPayload[metadataBlockValuesModel.name] = { displayName: metadataBlock.displayName, @@ -58,7 +57,7 @@ export const transformMetadataBlockModelsToRequestPayload = ( metadataBlock.metadataFields, ), }; - } + }); return 
metadataBlocksRequestPayload; }; @@ -87,26 +86,23 @@ export const transformMetadataFieldValueToRequestPayload = ( ): MetadataFieldRequestPayload => { let value: MetadataFieldValueRequestPayload; if (Array.isArray(metadataFieldValue)) { - if (metadataFieldValue.every((item: unknown) => typeof item === 'string')) { + if (typeof metadataFieldValue[0] == 'string') { value = metadataFieldValue as string[]; } else { let value: Record[] = []; - for (const item in metadataFieldValue as NewDatasetMetadataChildFieldValue[]) { + metadataFieldValue.forEach(function (metadataFieldValue: NewDatasetMetadataFieldValue) { value.push( transformMetadataChildFieldValueToRequestPayload( - item as unknown as NewDatasetMetadataChildFieldValue, + metadataFieldValue as NewDatasetMetadataChildFieldValue, metadataFieldInfo, ), ); - } + }); } } else if (typeof metadataFieldValue == 'string') { value = metadataFieldValue; } else { - value = transformMetadataChildFieldValueToRequestPayload( - metadataFieldValue as unknown as NewDatasetMetadataChildFieldValue, - metadataFieldInfo, - ); + value = transformMetadataChildFieldValueToRequestPayload(metadataFieldValue, metadataFieldInfo); } return { value: value, diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts new file mode 100644 index 00000000..ed3dfdc5 --- /dev/null +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -0,0 +1,19 @@ +import { createSandbox, SinonSandbox } from 'sinon'; +import { createNewDatasetMetadataBlockModel, createNewDatasetModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + +describe('transformNewDatasetModelToRequestPayload', () => { + const sandbox: SinonSandbox = createSandbox(); + + afterEach(() => { + sandbox.restore(); + }); + + test('should not raise a validation error when a new dataset with only the 
required fields is valid', async () => { + const testNewDataset = createNewDatasetModel(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + + const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); + console.log(actual); + }); +}); From 529686415a13e1a42cd152c39c1f0b3eac6956a3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 18 Jan 2024 15:23:47 +0000 Subject: [PATCH 26/96] Stash: newDatasetTransformers fixes and tests WIP --- .../transformers/newDatasetTransformers.ts | 14 ++-- test/testHelpers/datasets/newDatasetHelper.ts | 8 ++ .../datasets/newDatasetTransformers.test.ts | 74 ++++++++++++++++++- 3 files changed, 87 insertions(+), 9 deletions(-) diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts index 5da33c3a..6c8d2021 100644 --- a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -9,8 +9,10 @@ import { DatasetLicense } from '../../../domain/models/Dataset'; import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; export interface NewDatasetRequestPayload { - license?: DatasetLicense; - metadataBlocks: Record; + datasetVersion: { + license?: DatasetLicense; + metadataBlocks: Record; + }; } export interface MetadataBlockRequestPayload { @@ -36,8 +38,10 @@ export const transformNewDatasetModelToRequestPayload = ( metadataBlocks: MetadataBlock[], ): NewDatasetRequestPayload => { return { - ...(newDataset.license && { license: newDataset.license }), - metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), + datasetVersion: { + ...(newDataset.license && { license: newDataset.license }), + metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), + }, }; }; @@ -51,11 +55,11 @@ 
export const transformMetadataBlockModelsToRequestPayload = ( (metadataBlock) => metadataBlock.name == metadataBlockValuesModel.name, ); metadataBlocksRequestPayload[metadataBlockValuesModel.name] = { - displayName: metadataBlock.displayName, fields: transformMetadataFieldModelsToRequestPayload( metadataBlockValuesModel.fields, metadataBlock.metadataFields, ), + displayName: metadataBlock.displayName, }; }); return metadataBlocksRequestPayload; diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 642e4752..b2a10b78 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -81,6 +81,14 @@ export const createNewDatasetModelWithoutSecondLevelRequiredField = (): NewDatas }; }; +/** + * + * This method creates a simplified and altered version of the Citation Metadata Block, only for testing purposes. + * For this reason some of the metadata fields do not correspond to the real ones. + * + * @returns {MetadataBlock} A MetadataBlock testing instance. 
+ * + **/ export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { return { id: 1, diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts index ed3dfdc5..01e0050c 100644 --- a/test/unit/datasets/newDatasetTransformers.test.ts +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -1,6 +1,9 @@ -import { createSandbox, SinonSandbox } from 'sinon'; +import { assert, createSandbox, SinonSandbox } from 'sinon'; import { createNewDatasetMetadataBlockModel, createNewDatasetModel } from '../../testHelpers/datasets/newDatasetHelper'; -import { transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; +import { + transformNewDatasetModelToRequestPayload, + NewDatasetRequestPayload, +} from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; describe('transformNewDatasetModelToRequestPayload', () => { const sandbox: SinonSandbox = createSandbox(); @@ -9,11 +12,74 @@ describe('transformNewDatasetModelToRequestPayload', () => { sandbox.restore(); }); - test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { + test('happy path WIP', async () => { const testNewDataset = createNewDatasetModel(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const expected: NewDatasetRequestPayload = { + datasetVersion: { + metadataBlocks: { + citation: { + fields: [ + { + value: 'test dataset', + typeClass: 'primitive', + multiple: false, + typeName: 'title', + }, + { + value: [ + { + authorName: { + value: 'Admin, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + { + authorName: { + value: 'Owner, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 
'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + ], + typeClass: 'compound', + multiple: true, + typeName: 'author', + }, + { + value: ['alternative1', 'alternative2'], + typeClass: 'primitive', + multiple: true, + typeName: 'alternativeRequiredTitle', + }, + ], + displayName: 'Citation Metadata', + }, + }, + }, + }; + const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); - console.log(actual); + //assert.match(actual, expected); + assert.match( + actual.datasetVersion.metadataBlocks.citation.fields[1].value, + expected.datasetVersion.metadataBlocks.citation.fields[1].value, + ); }); }); From 5ad4e25ec22895b287a88f895ef72520fc541827 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 10:25:51 +0000 Subject: [PATCH 27/96] Added: newDatasetTransformers tweaks and fixes and passing unit test --- .../transformers/newDatasetTransformers.ts | 90 ++++++++++--------- test/testHelpers/datasets/newDatasetHelper.ts | 62 +++++++++++++ .../datasets/newDatasetTransformers.test.ts | 86 ++---------------- 3 files changed, 118 insertions(+), 120 deletions(-) diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts index 6c8d2021..b6c8fbab 100644 --- a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -46,17 +46,17 @@ export const transformNewDatasetModelToRequestPayload = ( }; export const transformMetadataBlockModelsToRequestPayload = ( - metadataBlockValuesModels: NewDatasetMetadataBlockValues[], + newDatasetMetadataBlocksValues: NewDatasetMetadataBlockValues[], metadataBlocks: MetadataBlock[], ): Record => { let metadataBlocksRequestPayload: Record = {}; - metadataBlockValuesModels.forEach(function (metadataBlockValuesModel: 
NewDatasetMetadataBlockValues) { + newDatasetMetadataBlocksValues.forEach(function (newDatasetMetadataBlockValues: NewDatasetMetadataBlockValues) { const metadataBlock: MetadataBlock = metadataBlocks.find( - (metadataBlock) => metadataBlock.name == metadataBlockValuesModel.name, + (metadataBlock) => metadataBlock.name == newDatasetMetadataBlockValues.name, ); - metadataBlocksRequestPayload[metadataBlockValuesModel.name] = { + metadataBlocksRequestPayload[newDatasetMetadataBlockValues.name] = { fields: transformMetadataFieldModelsToRequestPayload( - metadataBlockValuesModel.fields, + newDatasetMetadataBlockValues.fields, metadataBlock.metadataFields, ), displayName: metadataBlock.displayName, @@ -66,64 +66,67 @@ export const transformMetadataBlockModelsToRequestPayload = ( }; export const transformMetadataFieldModelsToRequestPayload = ( - metadataFieldsModel: NewDatasetMetadataFields, - metadataFields: Record, + newDatasetMetadataFields: NewDatasetMetadataFields, + metadataBlockFields: Record, ): MetadataFieldRequestPayload[] => { let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; - for (const metadataFieldKey of Object.keys(metadataFieldsModel)) { - const metadataFieldValue: NewDatasetMetadataFieldValue = metadataFieldsModel[metadataFieldKey]; - metadataFieldsRequestPayload.push( - transformMetadataFieldValueToRequestPayload( - metadataFieldValue, - metadataFieldKey, - metadataFields[metadataFieldKey], + for (const metadataFieldKey of Object.keys(newDatasetMetadataFields)) { + const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValue = newDatasetMetadataFields[metadataFieldKey]; + metadataFieldsRequestPayload.push({ + value: transformMetadataFieldValueToRequestPayload( + newDatasetMetadataChildFieldValue, + metadataBlockFields[metadataFieldKey], ), - ); + typeClass: metadataBlockFields[metadataFieldKey].typeClass, + multiple: metadataBlockFields[metadataFieldKey].multiple, + typeName: metadataFieldKey, + }); } return 
metadataFieldsRequestPayload; }; export const transformMetadataFieldValueToRequestPayload = ( - metadataFieldValue: NewDatasetMetadataFieldValue, - metadataFieldKey: string, - metadataFieldInfo: MetadataFieldInfo, -): MetadataFieldRequestPayload => { + newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + metadataBlockFieldInfo: MetadataFieldInfo, +): MetadataFieldValueRequestPayload => { let value: MetadataFieldValueRequestPayload; - if (Array.isArray(metadataFieldValue)) { - if (typeof metadataFieldValue[0] == 'string') { - value = metadataFieldValue as string[]; + if (metadataBlockFieldInfo.multiple) { + const newDatasetMetadataChildFieldValues = newDatasetMetadataFieldValue as + | string[] + | NewDatasetMetadataChildFieldValue[]; + if (typeof newDatasetMetadataChildFieldValues[0] == 'string') { + value = newDatasetMetadataFieldValue as string[]; } else { - let value: Record[] = []; - metadataFieldValue.forEach(function (metadataFieldValue: NewDatasetMetadataFieldValue) { - value.push( - transformMetadataChildFieldValueToRequestPayload( - metadataFieldValue as NewDatasetMetadataChildFieldValue, - metadataFieldInfo, - ), + value = []; + (newDatasetMetadataChildFieldValues as NewDatasetMetadataChildFieldValue[]).forEach(function ( + childMetadataFieldValue: NewDatasetMetadataChildFieldValue, + ) { + (value as Record[]).push( + transformMetadataChildFieldValueToRequestPayload(childMetadataFieldValue, metadataBlockFieldInfo), ); }); } - } else if (typeof metadataFieldValue == 'string') { - value = metadataFieldValue; } else { - value = transformMetadataChildFieldValueToRequestPayload(metadataFieldValue, metadataFieldInfo); + if (typeof newDatasetMetadataFieldValue == 'string') { + value = newDatasetMetadataFieldValue; + } else { + value = transformMetadataChildFieldValueToRequestPayload( + newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue, + metadataBlockFieldInfo, + ); + } } - return { - value: value, - typeClass: 
metadataFieldInfo.typeClass, - multiple: metadataFieldInfo.multiple, - typeName: metadataFieldKey, - }; + return value; }; export const transformMetadataChildFieldValueToRequestPayload = ( - metadataFieldValue: NewDatasetMetadataChildFieldValue, - metadataFieldInfo: MetadataFieldInfo, + newDatasetMetadataChildFieldValue: NewDatasetMetadataChildFieldValue, + metadataBlockFieldInfo: MetadataFieldInfo, ): Record => { let metadataChildFieldRequestPayload: Record = {}; - for (const metadataChildFieldKey of Object.keys(metadataFieldValue)) { - const childMetadataFieldInfo: MetadataFieldInfo = metadataFieldInfo.childMetadataFields[metadataChildFieldKey]; - const value: string = metadataFieldValue[metadataChildFieldKey] as unknown as string; + for (const metadataChildFieldKey of Object.keys(newDatasetMetadataChildFieldValue)) { + const childMetadataFieldInfo: MetadataFieldInfo = metadataBlockFieldInfo.childMetadataFields[metadataChildFieldKey]; + const value: string = newDatasetMetadataChildFieldValue[metadataChildFieldKey] as unknown as string; metadataChildFieldRequestPayload[metadataChildFieldKey] = { value: value, typeClass: childMetadataFieldInfo.typeClass, @@ -131,5 +134,6 @@ export const transformMetadataChildFieldValueToRequestPayload = ( typeName: metadataChildFieldKey, }; } + return metadataChildFieldRequestPayload; }; diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index b2a10b78..64f5e589 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -1,5 +1,6 @@ import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; import { MetadataBlock } from '../../../src'; +import { NewDatasetRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; export const createNewDatasetModel = ( titleFieldValue?: NewDatasetMetadataFieldValue, @@ -248,3 +249,64 @@ export const 
createNewDatasetMetadataBlockModel = (): MetadataBlock => { }, }; }; + +export const createNewDatasetRequestPayload = (): NewDatasetRequestPayload => { + return { + datasetVersion: { + metadataBlocks: { + citation: { + fields: [ + { + value: 'test dataset', + typeClass: 'primitive', + multiple: false, + typeName: 'title', + }, + { + value: [ + { + authorName: { + value: 'Admin, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + { + authorName: { + value: 'Owner, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + ], + typeClass: 'compound', + multiple: true, + typeName: 'author', + }, + { + value: ['alternative1', 'alternative2'], + typeClass: 'primitive', + multiple: true, + typeName: 'alternativeRequiredTitle', + }, + ], + displayName: 'Citation Metadata', + }, + }, + }, + }; +}; diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts index 01e0050c..6804b736 100644 --- a/test/unit/datasets/newDatasetTransformers.test.ts +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -1,85 +1,17 @@ -import { assert, createSandbox, SinonSandbox } from 'sinon'; -import { createNewDatasetMetadataBlockModel, createNewDatasetModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { assert } from 'sinon'; import { - transformNewDatasetModelToRequestPayload, - NewDatasetRequestPayload, -} from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + createNewDatasetMetadataBlockModel, + createNewDatasetModel, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { 
transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; describe('transformNewDatasetModelToRequestPayload', () => { - const sandbox: SinonSandbox = createSandbox(); - - afterEach(() => { - sandbox.restore(); - }); - - test('happy path WIP', async () => { + test('should correctly transform a new dataset model to a new dataset request payload', async () => { const testNewDataset = createNewDatasetModel(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; - - const expected: NewDatasetRequestPayload = { - datasetVersion: { - metadataBlocks: { - citation: { - fields: [ - { - value: 'test dataset', - typeClass: 'primitive', - multiple: false, - typeName: 'title', - }, - { - value: [ - { - authorName: { - value: 'Admin, Dataverse', - typeClass: 'primitive', - multiple: false, - typeName: 'authorName', - }, - authorAffiliation: { - value: 'Dataverse.org', - typeClass: 'primitive', - multiple: false, - typeName: 'authorAffiliation', - }, - }, - { - authorName: { - value: 'Owner, Dataverse', - typeClass: 'primitive', - multiple: false, - typeName: 'authorName', - }, - authorAffiliation: { - value: 'Dataverse.org', - typeClass: 'primitive', - multiple: false, - typeName: 'authorAffiliation', - }, - }, - ], - typeClass: 'compound', - multiple: true, - typeName: 'author', - }, - { - value: ['alternative1', 'alternative2'], - typeClass: 'primitive', - multiple: true, - typeName: 'alternativeRequiredTitle', - }, - ], - displayName: 'Citation Metadata', - }, - }, - }, - }; - + const expectedNewDatasetRequestPayload = createNewDatasetRequestPayload(); const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); - //assert.match(actual, expected); - assert.match( - actual.datasetVersion.metadataBlocks.citation.fields[1].value, - expected.datasetVersion.metadataBlocks.citation.fields[1].value, - ); + assert.match(actual, expectedNewDatasetRequestPayload); }); }); 
From 8f09830f6a06f1e315de98114dc0c8bef779969f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 11:33:04 +0000 Subject: [PATCH 28/96] Stash: createDataset repository logic WIP --- .../repositories/IDatasetsRepository.ts | 2 +- src/datasets/domain/useCases/CreateDataset.ts | 4 +- .../infra/repositories/DatasetsRepository.ts | 17 +++++++- test/unit/datasets/CreateDataset.test.ts | 4 +- test/unit/datasets/DatasetsRepository.test.ts | 40 +++++++++++++++++++ 5 files changed, 60 insertions(+), 7 deletions(-) diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 06ff92a9..2540cc9e 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -14,5 +14,5 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; - createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[]): Promise; + createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index 1d5905d4..fd51289f 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -20,10 +20,10 @@ export class CreateDataset implements UseCase { this.newDatasetValidator = newDatasetValidator; } - async execute(newDataset: NewDataset): Promise { + async execute(newDataset: NewDataset, collectionId: string = 'root'): Promise { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); return await this.newDatasetValidator.validate(newDataset, metadataBlocks).then(async () => { - return await this.datasetsRepository.createDataset(newDataset, metadataBlocks); + 
return await this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); }); } diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index 537c1df3..dfb099d2 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -10,6 +10,7 @@ import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transf import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; import { NewDataset } from '../../domain/models/NewDataset'; import { MetadataBlock } from '../../../metadataBlocks'; +import { transformNewDatasetModelToRequestPayload } from './transformers/newDatasetTransformers'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -18,6 +19,7 @@ export interface GetAllDatasetPreviewsQueryParams { export class DatasetsRepository extends ApiRepository implements IDatasetsRepository { private readonly datasetsResourceName: string = 'datasets'; + private readonly dataversesResourceName: string = 'dataverses'; public async getDatasetSummaryFieldNames(): Promise { return this.doGet(this.buildApiEndpoint(this.datasetsResourceName, 'summaryFieldNames')) @@ -109,7 +111,18 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi }); } - public async createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[]): Promise { - console.log(newDataset + ' ' + datasetMetadataBlocks.length); + public async createDataset( + newDataset: NewDataset, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise { + return this.doPost( + this.buildApiEndpoint(this.dataversesResourceName, `datasets`, collectionId), + transformNewDatasetModelToRequestPayload(newDataset, datasetMetadataBlocks), + ) + .then(() => {}) + .catch((error) => { + throw error; + }); } } diff --git a/test/unit/datasets/CreateDataset.test.ts 
b/test/unit/datasets/CreateDataset.test.ts index 260e68f1..6b626a78 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -37,7 +37,7 @@ describe('execute', () => { await sut.execute(testDataset); assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); - assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root') assert.callOrder(validateStub, createDatasetStub); }); @@ -77,7 +77,7 @@ describe('execute', () => { assert.match(actualError, testWriteError); assert.calledWithExactly(validateMock, testDataset, testMetadataBlocks); - assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); assert.callOrder(validateMock, createDatasetStub); }); diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index 261df777..9179a3f2 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -17,6 +17,11 @@ import { createDatasetPreviewModel, createDatasetPreviewPayload, } from '../../testHelpers/datasets/datasetPreviewHelper'; +import { + createNewDatasetModel, + createNewDatasetMetadataBlockModel, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; describe('DatasetsRepository', () => { const sandbox: SinonSandbox = createSandbox(); @@ -600,4 +605,39 @@ describe('DatasetsRepository', () => { expect(error).to.be.instanceOf(Error); }); }); + + describe('createDataset', () => { + const testNewDataset = createNewDatasetModel(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const testCollectionName = 'test'; + const expectedNewDatasetRequestPayloadJson = JSON.stringify(createNewDatasetRequestPayload()); + + const expectedApiEndpoint = 
`${TestConstants.TEST_API_URL}/dataverses/${testCollectionName}/datasets`; + + test('should call the API with a correct request payload', async () => { + const axiosPostMock = sandbox.stub(axios, 'post'); + + // API Key auth + await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + + assert.calledWithExactly( + axiosPostMock, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + + assert.calledWithExactly( + axiosPostMock, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + }); + }); }); From e4670cf31a411c724c6a4b775ae583222ae92650 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 11:40:39 +0000 Subject: [PATCH 29/96] Added: completed createDataset repository logic with passing tests --- .../infra/repositories/DatasetsRepository.ts | 3 +-- test/unit/datasets/DatasetsRepository.test.ts | 22 ++++++++++++++++--- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index dfb099d2..f33b381e 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -19,7 +19,6 @@ export interface GetAllDatasetPreviewsQueryParams { export class DatasetsRepository extends ApiRepository implements IDatasetsRepository { private readonly datasetsResourceName: string = 'datasets'; - private readonly dataversesResourceName: string = 'dataverses'; public async getDatasetSummaryFieldNames(): Promise { return this.doGet(this.buildApiEndpoint(this.datasetsResourceName, 'summaryFieldNames')) @@ -117,7 
+116,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi collectionId: string, ): Promise { return this.doPost( - this.buildApiEndpoint(this.dataversesResourceName, `datasets`, collectionId), + `/dataverses/${collectionId}/datasets`, transformNewDatasetModelToRequestPayload(newDataset, datasetMetadataBlocks), ) .then(() => {}) diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index 9179a3f2..d581d38e 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -22,6 +22,7 @@ import { createNewDatasetMetadataBlockModel, createNewDatasetRequestPayload, } from '../../testHelpers/datasets/newDatasetHelper'; +import { WriteError } from '../../../src'; describe('DatasetsRepository', () => { const sandbox: SinonSandbox = createSandbox(); @@ -615,13 +616,13 @@ describe('DatasetsRepository', () => { const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${testCollectionName}/datasets`; test('should call the API with a correct request payload', async () => { - const axiosPostMock = sandbox.stub(axios, 'post'); + const axiosPostStub = sandbox.stub(axios, 'post').resolves(); // API Key auth await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); assert.calledWithExactly( - axiosPostMock, + axiosPostStub, expectedApiEndpoint, expectedNewDatasetRequestPayloadJson, TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, @@ -633,11 +634,26 @@ describe('DatasetsRepository', () => { await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); assert.calledWithExactly( - axiosPostMock, + axiosPostStub, expectedApiEndpoint, expectedNewDatasetRequestPayloadJson, TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, ); }); + + test('should return error result on error response', async () => { + const axiosPostStub = sandbox.stub(axios, 
'post').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: WriteError = undefined; + await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + expect(error).to.be.instanceOf(Error); + }); }); }); From 0a4464e52a96735fe31f3693f585bba7c91da13d Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 12:10:04 +0000 Subject: [PATCH 30/96] Stash: createDataset IT WIP (failing) --- .../datasets/DatasetsRepository.test.ts | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a3064195..a9c7eaa4 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -12,6 +12,8 @@ import { DatasetNotNumberedVersion, DatasetLockType, DatasetPreviewSubset } from import { fail } from 'assert'; import { ApiConfig } from '../../../src'; import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'; +import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { MetadataBlocksRepository } from '../../../src/metadataBlocks/infra/repositories/MetadataBlocksRepository'; describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository(); @@ -243,4 +245,54 @@ describe('DatasetsRepository', () => { expect(typeof actualDatasetCitation).toBe('string'); }); }); + + describe('createDataset', () => { + const testNewDatasetTitle = 'Dataset created using the createDataset use case'; + const testNewDataset: NewDataset = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: testNewDatasetTitle, + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 
'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ], + datasetContact: [ + { + datasetContactEmail: 'finch@mailinator.com', + datasetContactName: 'Finch, Fiona', + }, + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of the dataset.', + }, + ], + subject: ['Medicine, Health and Life Sciences'], + }, + }, + ], + }; + + test('should create a dataset', async () => { + const metadataBlocksRepository = new MetadataBlocksRepository(); + const citationMetadataBlock = await metadataBlocksRepository.getMetadataBlockByName('citation'); + + await sut.createDataset(testNewDataset, [citationMetadataBlock], 'root').catch(() => { + assert.fail('Error while creating the Dataset'); + }); + + await new Promise((resolve) => setTimeout(resolve, 2000)); + + const actualCreatedDataset = await sut.getDataset(4, latestVersionId, false); + expect(actualCreatedDataset.metadataBlocks[0].fields.title).toBe(testNewDatasetTitle); + }); + }); }); From 97c47fdbad01efb1a0aa83aa0334de3acc55c6f3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 16:40:49 +0000 Subject: [PATCH 31/96] Changed: returning ids when the dataset is created --- .../models/CreatedDatasetIdentifiers.ts | 4 + .../repositories/IDatasetsRepository.ts | 2 +- src/datasets/domain/useCases/CreateDataset.ts | 5 +- .../infra/repositories/DatasetsRepository.ts | 11 +- .../datasets/DatasetsRepository.test.ts | 106 +++++++++++------- test/integration/environment/setup.js | 5 +- test/unit/datasets/CreateDataset.test.ts | 16 ++- test/unit/datasets/DatasetsRepository.test.ts | 25 ++++- 8 files changed, 118 insertions(+), 56 deletions(-) create mode 100644 src/datasets/domain/models/CreatedDatasetIdentifiers.ts diff --git a/src/datasets/domain/models/CreatedDatasetIdentifiers.ts b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts new file mode 100644 index 00000000..c4dc825a --- /dev/null +++ 
b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts @@ -0,0 +1,4 @@ +export interface CreatedDatasetIdentifiers { + persistentId: string; + numericId: number; +} diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 2540cc9e..929e8f54 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -14,5 +14,5 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; - createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; + createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index fd51289f..7840446c 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -4,8 +4,9 @@ import { NewDataset, NewDatasetMetadataBlockValues } from '../models/NewDataset' import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from '../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { MetadataBlock } from '../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; -export class CreateDataset implements UseCase { +export class CreateDataset implements UseCase { private datasetsRepository: IDatasetsRepository; private metadataBlocksRepository: IMetadataBlocksRepository; private newDatasetValidator: NewResourceValidator; @@ -20,7 +21,7 @@ export class CreateDataset implements UseCase { this.newDatasetValidator = newDatasetValidator; } - async 
execute(newDataset: NewDataset, collectionId: string = 'root'): Promise { + async execute(newDataset: NewDataset, collectionId: string = 'root'): Promise { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); return await this.newDatasetValidator.validate(newDataset, metadataBlocks).then(async () => { return await this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index f33b381e..ba0dfd75 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -11,6 +11,7 @@ import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; import { NewDataset } from '../../domain/models/NewDataset'; import { MetadataBlock } from '../../../metadataBlocks'; import { transformNewDatasetModelToRequestPayload } from './transformers/newDatasetTransformers'; +import { CreatedDatasetIdentifiers } from '../../domain/models/CreatedDatasetIdentifiers'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -114,12 +115,18 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[], collectionId: string, - ): Promise { + ): Promise { return this.doPost( `/dataverses/${collectionId}/datasets`, transformNewDatasetModelToRequestPayload(newDataset, datasetMetadataBlocks), ) - .then(() => {}) + .then((response) => { + const responseData = response.data.data; + return { + persistentId: responseData.persistentId, + numericId: responseData.id, + }; + }) .catch((error) => { throw error; }); diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index a9c7eaa4..d6003261 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ 
b/test/integration/datasets/DatasetsRepository.test.ts @@ -14,6 +14,7 @@ import { ApiConfig } from '../../../src'; import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'; import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; import { MetadataBlocksRepository } from '../../../src/metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { Author, DatasetContact, DatasetDescription } from '../../../src/datasets/domain/models/Dataset'; describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository(); @@ -247,52 +248,75 @@ describe('DatasetsRepository', () => { }); describe('createDataset', () => { - const testNewDatasetTitle = 'Dataset created using the createDataset use case'; - const testNewDataset: NewDataset = { - metadataBlockValues: [ - { - name: 'citation', - fields: { - title: testNewDatasetTitle, - author: [ - { - authorName: 'Admin, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - { - authorName: 'Owner, Dataverse', - authorAffiliation: 'Dataverse.org', - }, - ], - datasetContact: [ - { - datasetContactEmail: 'finch@mailinator.com', - datasetContactName: 'Finch, Fiona', - }, - ], - dsDescription: [ - { - dsDescriptionValue: 'This is the description of the dataset.', - }, - ], - subject: ['Medicine, Health and Life Sciences'], + test('should create a dataset with the provided dataset citation fields', async () => { + const testTitle = 'Dataset created using the createDataset use case'; + const testAuthorName1 = 'Admin, Dataverse'; + const testAuthorName2 = 'Owner, Dataverse'; + const testAuthorAffiliation1 = 'Dataverse.org'; + const testAuthorAffiliation2 = 'Dataversedemo.org'; + const testContactEmail = 'finch@mailinator.com'; + const testContactName = 'Finch, Fiona'; + const testDescription = 'This is the description of the dataset.'; + const testSubject = ['Medicine, Health and Life Sciences']; + + const testNewDataset: NewDataset = { + 
metadataBlockValues: [ + { + name: 'citation', + fields: { + title: testTitle, + author: [ + { + authorName: testAuthorName1, + authorAffiliation: testAuthorAffiliation1, + }, + { + authorName: testAuthorName2, + authorAffiliation: testAuthorAffiliation2, + }, + ], + datasetContact: [ + { + datasetContactEmail: testContactEmail, + datasetContactName: testContactName, + }, + ], + dsDescription: [ + { + dsDescriptionValue: testDescription, + }, + ], + subject: testSubject, + }, }, - }, - ], - }; + ], + }; - test('should create a dataset', async () => { const metadataBlocksRepository = new MetadataBlocksRepository(); const citationMetadataBlock = await metadataBlocksRepository.getMetadataBlockByName('citation'); + const createdDataset = await sut.createDataset(testNewDataset, [citationMetadataBlock], 'root'); + const actualCreatedDataset = await sut.getDataset(createdDataset.numericId, latestVersionId, false); - await sut.createDataset(testNewDataset, [citationMetadataBlock], 'root').catch(() => { - assert.fail('Error while creating the Dataset'); - }); - - await new Promise((resolve) => setTimeout(resolve, 2000)); - - const actualCreatedDataset = await sut.getDataset(4, latestVersionId, false); - expect(actualCreatedDataset.metadataBlocks[0].fields.title).toBe(testNewDatasetTitle); + expect(actualCreatedDataset.metadataBlocks[0].fields.title).toBe(testTitle); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorName).toBe(testAuthorName1); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorAffiliation).toBe( + testAuthorAffiliation1, + ); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorName).toBe(testAuthorName2); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorAffiliation).toBe( + testAuthorAffiliation2, + ); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactEmail, + 
).toBe(testContactEmail); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactName, + ).toBe(testContactName); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.dsDescription[0] as DatasetDescription).dsDescriptionValue, + ).toBe(testDescription); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[0]).toBe(testSubject[0]); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[1]).toBe(testSubject[1]); }); }); }); diff --git a/test/integration/environment/setup.js b/test/integration/environment/setup.js index 57cf5c06..c618ce63 100644 --- a/test/integration/environment/setup.js +++ b/test/integration/environment/setup.js @@ -51,12 +51,11 @@ async function setupTestFixtures() { console.log('Creating test datasets...'); await createDatasetViaApi(datasetJson1) .then() - .catch((error) => { + .catch(() => { console.error('Tests setup: Error while creating test Dataset 1'); }); await createDatasetViaApi(datasetJson2) - .then() - .catch((error) => { + .catch(() => { console.error('Tests setup: Error while creating test Dataset 2'); }); console.log('Test datasets created'); diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index 6b626a78..88977a77 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -1,4 +1,5 @@ import { CreateDataset } from '../../../src/datasets/domain/useCases/CreateDataset'; +import { CreatedDatasetIdentifiers } from '../../../src/datasets/domain/models/CreatedDatasetIdentifiers'; import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository'; import { assert, createSandbox, SinonSandbox } from 'sinon'; import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; @@ -23,9 +24,14 @@ describe('execute', () => { return metadataBlocksRepositoryStub; } - test('should call repository when 
validation is successful', async () => { + test('should return new dataset identifiers when validation is successful and repository call is successful', async () => { + const testCreatedDatasetIdentifiers: CreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + const datasetsRepositoryStub = {}; - const createDatasetStub = sandbox.stub(); + const createDatasetStub = sandbox.stub().returns(testCreatedDatasetIdentifiers); datasetsRepositoryStub.createDataset = createDatasetStub; const newDatasetValidatorStub = {}; @@ -34,10 +40,12 @@ describe('execute', () => { const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); - await sut.execute(testDataset); + const actual = await sut.execute(testDataset); + + assert.match(actual, testCreatedDatasetIdentifiers); assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); - assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root') + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); assert.callOrder(validateStub, createDatasetStub); }); diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index d581d38e..eba9f682 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -613,13 +613,28 @@ describe('DatasetsRepository', () => { const testCollectionName = 'test'; const expectedNewDatasetRequestPayloadJson = JSON.stringify(createNewDatasetRequestPayload()); + const testCreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + + const testCreateDatasetResponse = { + data: { + status: 'OK', + data: { + id: testCreatedDatasetIdentifiers.numericId, + persistentId: testCreatedDatasetIdentifiers.persistentId, + }, + }, + }; + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${testCollectionName}/datasets`; test('should call the API 
with a correct request payload', async () => { - const axiosPostStub = sandbox.stub(axios, 'post').resolves(); + const axiosPostStub = sandbox.stub(axios, 'post').resolves(testCreateDatasetResponse); // API Key auth - await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + let actual = await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); assert.calledWithExactly( axiosPostStub, @@ -628,10 +643,12 @@ describe('DatasetsRepository', () => { TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, ); + assert.match(actual, testCreatedDatasetIdentifiers); + // Session cookie auth ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + actual = await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); assert.calledWithExactly( axiosPostStub, @@ -639,6 +656,8 @@ describe('DatasetsRepository', () => { expectedNewDatasetRequestPayloadJson, TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, ); + + assert.match(actual, testCreatedDatasetIdentifiers); }); test('should return error result on error response', async () => { From d9aa27a6f6f9d585e1c0f0dce9786f6a5b7c1a16 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 16:44:27 +0000 Subject: [PATCH 32/96] Added: missing models exported in index.ts --- src/datasets/index.ts | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 4e67636a..f611e4a8 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -50,3 +50,11 @@ export { } from './domain/models/Dataset'; export { DatasetPreview } from './domain/models/DatasetPreview'; export { DatasetPreviewSubset } from './domain/models/DatasetPreviewSubset'; +export { + NewDataset, + NewDatasetMetadataBlockValues, + NewDatasetMetadataFields, + NewDatasetMetadataFieldValue, + 
NewDatasetMetadataChildFieldValue, +} from './domain/models/NewDataset'; +export { CreatedDatasetIdentifiers } from './domain/models/CreatedDatasetIdentifiers'; From db836d8bd6714634c4976eb6dca6dc432ed188bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 19 Jan 2024 16:45:37 +0000 Subject: [PATCH 33/96] Fixed: missing import in IDatasetsRepository --- src/datasets/domain/repositories/IDatasetsRepository.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 929e8f54..84ca932f 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -4,6 +4,7 @@ import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; import { NewDataset } from '../models/NewDataset'; import { MetadataBlock } from '../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; export interface IDatasetsRepository { getDatasetSummaryFieldNames(): Promise; From b86911f0a523c805489cee5742cc3f8f6a3380e2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 Jan 2024 12:33:37 +0000 Subject: [PATCH 34/96] Refactor: DTO convention for new dataset creation --- src/datasets/domain/dtos/NewDatasetDTO.ts | 21 ++++++++++ src/datasets/domain/models/NewDataset.ts | 21 ---------- .../repositories/IDatasetsRepository.ts | 4 +- src/datasets/domain/useCases/CreateDataset.ts | 8 ++-- .../validators/NewDatasetValidator.ts | 30 +++++++-------- src/datasets/index.ts | 12 +++--- .../infra/repositories/DatasetsRepository.ts | 4 +- .../transformers/newDatasetTransformers.ts | 34 ++++++++--------- .../datasets/DatasetsRepository.test.ts | 4 +- test/testHelpers/datasets/newDatasetHelper.ts | 20 +++++----- test/unit/datasets/CreateDataset.test.ts | 4 +- test/unit/datasets/DatasetsRepository.test.ts | 4 +- 
.../unit/datasets/NewDatasetValidator.test.ts | 38 +++++++++---------- .../datasets/newDatasetTransformers.test.ts | 4 +- 14 files changed, 104 insertions(+), 104 deletions(-) create mode 100644 src/datasets/domain/dtos/NewDatasetDTO.ts delete mode 100644 src/datasets/domain/models/NewDataset.ts diff --git a/src/datasets/domain/dtos/NewDatasetDTO.ts b/src/datasets/domain/dtos/NewDatasetDTO.ts new file mode 100644 index 00000000..5d6cdaff --- /dev/null +++ b/src/datasets/domain/dtos/NewDatasetDTO.ts @@ -0,0 +1,21 @@ +import { DatasetLicense } from '../models/Dataset'; + +export interface NewDatasetDTO { + license?: DatasetLicense; + metadataBlockValues: NewDatasetMetadataBlockValuesDTO[]; +} + +export interface NewDatasetMetadataBlockValuesDTO { + name: string; + fields: NewDatasetMetadataFieldsDTO; +} + +export type NewDatasetMetadataFieldsDTO = Record; + +export type NewDatasetMetadataFieldValueDTO = + | string + | string[] + | NewDatasetMetadataChildFieldValueDTO + | NewDatasetMetadataChildFieldValueDTO[]; + +export type NewDatasetMetadataChildFieldValueDTO = Record; diff --git a/src/datasets/domain/models/NewDataset.ts b/src/datasets/domain/models/NewDataset.ts deleted file mode 100644 index 2795c192..00000000 --- a/src/datasets/domain/models/NewDataset.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { DatasetLicense } from './Dataset'; - -export interface NewDataset { - license?: DatasetLicense; - metadataBlockValues: NewDatasetMetadataBlockValues[]; -} - -export interface NewDatasetMetadataBlockValues { - name: string; - fields: NewDatasetMetadataFields; -} - -export type NewDatasetMetadataFields = Record; - -export type NewDatasetMetadataFieldValue = - | string - | string[] - | NewDatasetMetadataChildFieldValue - | NewDatasetMetadataChildFieldValue[]; - -export type NewDatasetMetadataChildFieldValue = Record; diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 84ca932f..41c7ca78 100644 
--- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -2,7 +2,7 @@ import { Dataset } from '../models/Dataset'; import { DatasetUserPermissions } from '../models/DatasetUserPermissions'; import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; -import { NewDataset } from '../models/NewDataset'; +import { NewDatasetDTO } from '../dtos/NewDatasetDTO'; import { MetadataBlock } from '../../../metadataBlocks'; import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; @@ -15,5 +15,5 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; - createDataset(newDataset: NewDataset, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; + createDataset(newDataset: NewDatasetDTO, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index 7840446c..da73e517 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -1,6 +1,6 @@ import { UseCase } from '../../../core/domain/useCases/UseCase'; import { IDatasetsRepository } from '../repositories/IDatasetsRepository'; -import { NewDataset, NewDatasetMetadataBlockValues } from '../models/NewDataset'; +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../dtos/NewDatasetDTO'; import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; import { IMetadataBlocksRepository } from '../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; import { MetadataBlock } from '../../../metadataBlocks'; @@ -21,19 +21,19 @@ export class CreateDataset 
implements UseCase { this.newDatasetValidator = newDatasetValidator; } - async execute(newDataset: NewDataset, collectionId: string = 'root'): Promise { + async execute(newDataset: NewDatasetDTO, collectionId: string = 'root'): Promise { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); return await this.newDatasetValidator.validate(newDataset, metadataBlocks).then(async () => { return await this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); }); } - async getNewDatasetMetadataBlocks(newDataset: NewDataset): Promise { + async getNewDatasetMetadataBlocks(newDataset: NewDatasetDTO): Promise { let metadataBlocks: MetadataBlock[] = []; for (const metadataBlockValue in newDataset.metadataBlockValues) { metadataBlocks.push( await this.metadataBlocksRepository.getMetadataBlockByName( - (metadataBlockValue as unknown as NewDatasetMetadataBlockValues).name, + (metadataBlockValue as unknown as NewDatasetMetadataBlockValuesDTO).name, ), ); } diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 712fb1f2..d5f63692 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,9 +1,9 @@ import { - NewDataset, - NewDatasetMetadataFieldValue, - NewDatasetMetadataChildFieldValue, - NewDatasetMetadataBlockValues, -} from '../../models/NewDataset'; + NewDatasetDTO, + NewDatasetMetadataFieldValueDTO, + NewDatasetMetadataChildFieldValueDTO, + NewDatasetMetadataBlockValuesDTO, +} from '../../dtos/NewDatasetDTO'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { MetadataFieldInfo, MetadataBlock } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; @@ -15,21 +15,21 @@ import { 
DateFormatFieldError } from './errors/DateFormatFieldError'; export interface NewDatasetMetadataFieldAndValueInfo { metadataFieldInfo: MetadataFieldInfo; metadataFieldKey: string; - metadataFieldValue: NewDatasetMetadataFieldValue; + metadataFieldValue: NewDatasetMetadataFieldValueDTO; metadataBlockName: string; metadataParentFieldKey?: string; metadataFieldPosition?: number; } export class NewDatasetValidator implements NewResourceValidator { - async validate(resource: NewDataset, metadataBlocks: MetadataBlock[]): Promise { + async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { await this.validateMetadataBlock(metadataBlockValues, metadataBlocks); } } private async validateMetadataBlock( - metadataBlockValues: NewDatasetMetadataBlockValues, + metadataBlockValues: NewDatasetMetadataBlockValuesDTO, metadataBlocks: MetadataBlock[], ) { const metadataBlockName = metadataBlockValues.name; @@ -99,7 +99,7 @@ export class NewDatasetValidator implements NewResourceValidator { ); } - const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValue[]; + const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValueDTO[]; fieldValues.forEach((value, metadataFieldPosition) => { this.validateFieldValue({ metadataFieldInfo: metadataFieldInfo, @@ -189,7 +189,7 @@ export class NewDatasetValidator implements NewResourceValidator { metadataFieldInfo: childMetadataFieldInfo, metadataFieldKey: childMetadataFieldKey, metadataFieldValue: ( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValue + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO )[childMetadataFieldKey], metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, @@ -198,19 +198,19 @@ export class NewDatasetValidator implements 
NewResourceValidator { } } - private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValue): boolean { + private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; } - private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValue): boolean { - return Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0; + private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0; } private isValidArrayType( - metadataFieldValue: Array, + metadataFieldValue: Array, expectedType: 'string' | 'object', ): boolean { - return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValue) => typeof item === expectedType); + return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValueDTO) => typeof item === expectedType); } private createGeneralValidationError( diff --git a/src/datasets/index.ts b/src/datasets/index.ts index f611e4a8..5028cf1f 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -51,10 +51,10 @@ export { export { DatasetPreview } from './domain/models/DatasetPreview'; export { DatasetPreviewSubset } from './domain/models/DatasetPreviewSubset'; export { - NewDataset, - NewDatasetMetadataBlockValues, - NewDatasetMetadataFields, - NewDatasetMetadataFieldValue, - NewDatasetMetadataChildFieldValue, -} from './domain/models/NewDataset'; + NewDatasetDTO as NewDataset, + NewDatasetMetadataBlockValuesDTO as NewDatasetMetadataBlockValues, + NewDatasetMetadataFieldsDTO as NewDatasetMetadataFields, + NewDatasetMetadataFieldValueDTO as NewDatasetMetadataFieldValue, + NewDatasetMetadataChildFieldValueDTO as NewDatasetMetadataChildFieldValue, +} from './domain/dtos/NewDatasetDTO'; export { CreatedDatasetIdentifiers } from './domain/models/CreatedDatasetIdentifiers'; diff --git 
a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index ba0dfd75..3323c429 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -8,7 +8,7 @@ import { DatasetLock } from '../../domain/models/DatasetLock'; import { transformDatasetLocksResponseToDatasetLocks } from './transformers/datasetLocksTransformers'; import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transformers/datasetPreviewsTransformers'; import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; -import { NewDataset } from '../../domain/models/NewDataset'; +import { NewDatasetDTO } from '../../domain/dtos/NewDatasetDTO'; import { MetadataBlock } from '../../../metadataBlocks'; import { transformNewDatasetModelToRequestPayload } from './transformers/newDatasetTransformers'; import { CreatedDatasetIdentifiers } from '../../domain/models/CreatedDatasetIdentifiers'; @@ -112,7 +112,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi } public async createDataset( - newDataset: NewDataset, + newDataset: NewDatasetDTO, datasetMetadataBlocks: MetadataBlock[], collectionId: string, ): Promise { diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts index b6c8fbab..b4ff1f37 100644 --- a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -1,10 +1,10 @@ import { - NewDataset, - NewDatasetMetadataBlockValues, - NewDatasetMetadataFields, - NewDatasetMetadataFieldValue, - NewDatasetMetadataChildFieldValue, -} from '../../../domain/models/NewDataset'; + NewDatasetDTO, + NewDatasetMetadataBlockValuesDTO, + NewDatasetMetadataFieldsDTO, + NewDatasetMetadataFieldValueDTO, + NewDatasetMetadataChildFieldValueDTO, +} 
from '../../../domain/dtos/NewDatasetDTO'; import { DatasetLicense } from '../../../domain/models/Dataset'; import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; @@ -34,7 +34,7 @@ export type MetadataFieldValueRequestPayload = | Record[]; export const transformNewDatasetModelToRequestPayload = ( - newDataset: NewDataset, + newDataset: NewDatasetDTO, metadataBlocks: MetadataBlock[], ): NewDatasetRequestPayload => { return { @@ -46,11 +46,11 @@ export const transformNewDatasetModelToRequestPayload = ( }; export const transformMetadataBlockModelsToRequestPayload = ( - newDatasetMetadataBlocksValues: NewDatasetMetadataBlockValues[], + newDatasetMetadataBlocksValues: NewDatasetMetadataBlockValuesDTO[], metadataBlocks: MetadataBlock[], ): Record => { let metadataBlocksRequestPayload: Record = {}; - newDatasetMetadataBlocksValues.forEach(function (newDatasetMetadataBlockValues: NewDatasetMetadataBlockValues) { + newDatasetMetadataBlocksValues.forEach(function (newDatasetMetadataBlockValues: NewDatasetMetadataBlockValuesDTO) { const metadataBlock: MetadataBlock = metadataBlocks.find( (metadataBlock) => metadataBlock.name == newDatasetMetadataBlockValues.name, ); @@ -66,12 +66,12 @@ export const transformMetadataBlockModelsToRequestPayload = ( }; export const transformMetadataFieldModelsToRequestPayload = ( - newDatasetMetadataFields: NewDatasetMetadataFields, + newDatasetMetadataFields: NewDatasetMetadataFieldsDTO, metadataBlockFields: Record, ): MetadataFieldRequestPayload[] => { let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; for (const metadataFieldKey of Object.keys(newDatasetMetadataFields)) { - const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValue = newDatasetMetadataFields[metadataFieldKey]; + const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValueDTO = newDatasetMetadataFields[metadataFieldKey]; metadataFieldsRequestPayload.push({ value: transformMetadataFieldValueToRequestPayload( 
newDatasetMetadataChildFieldValue, @@ -86,20 +86,20 @@ export const transformMetadataFieldModelsToRequestPayload = ( }; export const transformMetadataFieldValueToRequestPayload = ( - newDatasetMetadataFieldValue: NewDatasetMetadataFieldValue, + newDatasetMetadataFieldValue: NewDatasetMetadataFieldValueDTO, metadataBlockFieldInfo: MetadataFieldInfo, ): MetadataFieldValueRequestPayload => { let value: MetadataFieldValueRequestPayload; if (metadataBlockFieldInfo.multiple) { const newDatasetMetadataChildFieldValues = newDatasetMetadataFieldValue as | string[] - | NewDatasetMetadataChildFieldValue[]; + | NewDatasetMetadataChildFieldValueDTO[]; if (typeof newDatasetMetadataChildFieldValues[0] == 'string') { value = newDatasetMetadataFieldValue as string[]; } else { value = []; - (newDatasetMetadataChildFieldValues as NewDatasetMetadataChildFieldValue[]).forEach(function ( - childMetadataFieldValue: NewDatasetMetadataChildFieldValue, + (newDatasetMetadataChildFieldValues as NewDatasetMetadataChildFieldValueDTO[]).forEach(function ( + childMetadataFieldValue: NewDatasetMetadataChildFieldValueDTO, ) { (value as Record[]).push( transformMetadataChildFieldValueToRequestPayload(childMetadataFieldValue, metadataBlockFieldInfo), @@ -111,7 +111,7 @@ export const transformMetadataFieldValueToRequestPayload = ( value = newDatasetMetadataFieldValue; } else { value = transformMetadataChildFieldValueToRequestPayload( - newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValue, + newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValueDTO, metadataBlockFieldInfo, ); } @@ -120,7 +120,7 @@ export const transformMetadataFieldValueToRequestPayload = ( }; export const transformMetadataChildFieldValueToRequestPayload = ( - newDatasetMetadataChildFieldValue: NewDatasetMetadataChildFieldValue, + newDatasetMetadataChildFieldValue: NewDatasetMetadataChildFieldValueDTO, metadataBlockFieldInfo: MetadataFieldInfo, ): Record => { let metadataChildFieldRequestPayload: Record = {}; 
diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index d6003261..d1c1d602 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -12,7 +12,7 @@ import { DatasetNotNumberedVersion, DatasetLockType, DatasetPreviewSubset } from import { fail } from 'assert'; import { ApiConfig } from '../../../src'; import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'; -import { NewDataset } from '../../../src/datasets/domain/models/NewDataset'; +import { NewDatasetDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; import { MetadataBlocksRepository } from '../../../src/metadataBlocks/infra/repositories/MetadataBlocksRepository'; import { Author, DatasetContact, DatasetDescription } from '../../../src/datasets/domain/models/Dataset'; @@ -259,7 +259,7 @@ describe('DatasetsRepository', () => { const testDescription = 'This is the description of the dataset.'; const testSubject = ['Medicine, Health and Life Sciences']; - const testNewDataset: NewDataset = { + const testNewDataset: NewDatasetDTO = { metadataBlockValues: [ { name: 'citation', diff --git a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts index 64f5e589..4ff08439 100644 --- a/test/testHelpers/datasets/newDatasetHelper.ts +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -1,14 +1,14 @@ -import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; import { MetadataBlock } from '../../../src'; import { NewDatasetRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; -export const createNewDatasetModel = ( - titleFieldValue?: NewDatasetMetadataFieldValue, - authorFieldValue?: 
NewDatasetMetadataFieldValue, - alternativeRequiredTitleValue?: NewDatasetMetadataFieldValue, - timePeriodCoveredStartValue?: NewDatasetMetadataFieldValue, - contributorTypeValue?: NewDatasetMetadataFieldValue, -): NewDataset => { +export const createNewDatasetDTO = ( + titleFieldValue?: NewDatasetMetadataFieldValueDTO, + authorFieldValue?: NewDatasetMetadataFieldValueDTO, + alternativeRequiredTitleValue?: NewDatasetMetadataFieldValueDTO, + timePeriodCoveredStartValue?: NewDatasetMetadataFieldValueDTO, + contributorTypeValue?: NewDatasetMetadataFieldValueDTO, +): NewDatasetDTO => { const validTitle = 'test dataset'; const validAuthorFieldValue = [ { @@ -47,7 +47,7 @@ export const createNewDatasetModel = ( }; }; -export const createNewDatasetModelWithoutFirstLevelRequiredField = (): NewDataset => { +export const createNewDatasetDTOWithoutFirstLevelRequiredField = (): NewDatasetDTO => { return { metadataBlockValues: [ { @@ -60,7 +60,7 @@ export const createNewDatasetModelWithoutFirstLevelRequiredField = (): NewDatase }; }; -export const createNewDatasetModelWithoutSecondLevelRequiredField = (): NewDataset => { +export const createNewDatasetDTOWithoutSecondLevelRequiredField = (): NewDatasetDTO => { return { metadataBlockValues: [ { diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index 88977a77..6cfd4544 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -3,14 +3,14 @@ import { CreatedDatasetIdentifiers } from '../../../src/datasets/domain/models/C import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository'; import { assert, createSandbox, SinonSandbox } from 'sinon'; import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; -import { createNewDatasetModel, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { createNewDatasetDTO, 
createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; import { ResourceValidationError } from '../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; import { WriteError } from '../../../src'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; describe('execute', () => { const sandbox: SinonSandbox = createSandbox(); - const testDataset = createNewDatasetModel(); + const testDataset = createNewDatasetDTO(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; afterEach(() => { diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index eba9f682..c33bbeb4 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -18,7 +18,7 @@ import { createDatasetPreviewPayload, } from '../../testHelpers/datasets/datasetPreviewHelper'; import { - createNewDatasetModel, + createNewDatasetDTO, createNewDatasetMetadataBlockModel, createNewDatasetRequestPayload, } from '../../testHelpers/datasets/newDatasetHelper'; @@ -608,7 +608,7 @@ describe('DatasetsRepository', () => { }); describe('createDataset', () => { - const testNewDataset = createNewDatasetModel(); + const testNewDataset = createNewDatasetDTO(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; const testCollectionName = 'test'; const expectedNewDatasetRequestPayloadJson = JSON.stringify(createNewDatasetRequestPayload()); diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index c083ba06..82ca5274 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -1,14 +1,14 @@ import { NewDatasetValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetValidator'; import { assert, createSandbox, SinonSandbox } from 'sinon'; import { - 
createNewDatasetModel, + createNewDatasetDTO, createNewDatasetMetadataBlockModel, - createNewDatasetModelWithoutFirstLevelRequiredField, + createNewDatasetDTOWithoutFirstLevelRequiredField, } from '../../testHelpers/datasets/newDatasetHelper'; import { fail } from 'assert'; import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; -import { NewDataset, NewDatasetMetadataFieldValue } from '../../../src/datasets/domain/models/NewDataset'; +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; describe('validate', () => { const sandbox: SinonSandbox = createSandbox(); @@ -19,7 +19,7 @@ describe('validate', () => { }); async function runValidateExpectingFieldValidationError( - newDataset: NewDataset, + newDataset: NewDatasetDTO, expectedMetadataFieldName: string, expectedErrorMessage: string, expectedParentMetadataFieldName?: string, @@ -42,7 +42,7 @@ describe('validate', () => { } test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { - const testNewDataset = createNewDatasetModel(); + const testNewDataset = createNewDatasetDTO(); const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); @@ -50,15 +50,15 @@ describe('validate', () => { test('should raise an empty field error when a first level required string field is missing', async () => { await runValidateExpectingFieldValidationError( - createNewDatasetModelWithoutFirstLevelRequiredField(), + createNewDatasetDTOWithoutFirstLevelRequiredField(), 'author', 'There was an error when validating the field author from metadata block citation. 
Reason was: The field should not be empty.', ); }); test('should raise an empty field error when a first level required array field is empty', async () => { - const invalidAuthorFieldValue: NewDatasetMetadataFieldValue = []; - const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); + const invalidAuthorFieldValue: NewDatasetMetadataFieldValueDTO = []; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'author', @@ -68,7 +68,7 @@ describe('validate', () => { test('should raise an error when the provided field value for an unique field is an array', async () => { const invalidTitleFieldValue = ['title1', 'title2']; - const testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'title', @@ -81,7 +81,7 @@ describe('validate', () => { invalidChildField1: 'invalid value 1', invalidChildField2: 'invalid value 2', }; - const testNewDataset = createNewDatasetModel(invalidTitleFieldValue, undefined, undefined); + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'title', @@ -91,7 +91,7 @@ describe('validate', () => { test('should raise an error when the provided field value for a multiple field is a string', async () => { const invalidAuthorFieldValue = 'invalidValue'; - const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'author', @@ -101,7 +101,7 @@ describe('validate', () => { test('should raise an error when the provided field value is an array of 
strings and the field expects an array of objects', async () => { const invalidAuthorFieldValue = ['invalidValue1', 'invalidValue2']; - const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'author', @@ -120,7 +120,7 @@ describe('validate', () => { invalidChildField2: 'invalid value 2', }, ]; - const testNewDataset = createNewDatasetModel(undefined, undefined, invalidAlternativeTitleFieldValue); + const testNewDataset = createNewDatasetDTO(undefined, undefined, invalidAlternativeTitleFieldValue); await runValidateExpectingFieldValidationError( testNewDataset, 'alternativeRequiredTitle', @@ -138,7 +138,7 @@ describe('validate', () => { authorAffiliation: 'Dataverse.org', }, ]; - const testNewDataset = createNewDatasetModel(undefined, invalidAuthorFieldValue, undefined); + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); await runValidateExpectingFieldValidationError( testNewDataset, 'authorName', @@ -158,13 +158,13 @@ describe('validate', () => { authorName: 'John, Doe', }, ]; - const testNewDataset = createNewDatasetModel(undefined, authorFieldValue, undefined); + const testNewDataset = createNewDatasetDTO(undefined, authorFieldValue, undefined); const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); test('should raise a date format validation error when a date field has an invalid format', async () => { - const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, '1-1-2020'); + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '1-1-2020'); await runValidateExpectingFieldValidationError( testNewDataset, 'timePeriodCoveredStart', @@ -173,13 +173,13 @@ describe('validate', () => { }); test('should not raise a date format 
validation error when a date field has a valid format', async () => { - const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, '2020-01-01'); + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '2020-01-01'); const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); test('should raise a controlled vocabulary error when a controlled vocabulary field has an invalid format', async () => { - const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, undefined, 'Wrong Value'); + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Wrong Value'); await runValidateExpectingFieldValidationError( testNewDataset, 'contributorType', @@ -190,7 +190,7 @@ describe('validate', () => { }); test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { - const testNewDataset = createNewDatasetModel(undefined, undefined, undefined, undefined, 'Project Member'); + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Project Member'); const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts index 6804b736..93e19d9b 100644 --- a/test/unit/datasets/newDatasetTransformers.test.ts +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -1,14 +1,14 @@ import { assert } from 'sinon'; import { createNewDatasetMetadataBlockModel, - createNewDatasetModel, + createNewDatasetDTO, createNewDatasetRequestPayload, } from '../../testHelpers/datasets/newDatasetHelper'; import { transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; describe('transformNewDatasetModelToRequestPayload', () 
=> { test('should correctly transform a new dataset model to a new dataset request payload', async () => { - const testNewDataset = createNewDatasetModel(); + const testNewDataset = createNewDatasetDTO(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; const expectedNewDatasetRequestPayload = createNewDatasetRequestPayload(); const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); From 53bc05f3d90865af84d713fae756be2bb8e8a792 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 Jan 2024 15:11:39 +0000 Subject: [PATCH 35/96] Fixed: CreateDataset dependency calls --- .../repositories/IDatasetsRepository.ts | 6 +- src/datasets/domain/useCases/CreateDataset.ts | 17 ++--- .../validators/NewDatasetValidator.ts | 4 +- .../transformers/newDatasetTransformers.ts | 3 +- test/unit/datasets/CreateDataset.test.ts | 65 ++++++++++++++----- 5 files changed, 66 insertions(+), 29 deletions(-) diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 41c7ca78..fa3c697d 100644 --- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -15,5 +15,9 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number): Promise; - createDataset(newDataset: NewDatasetDTO, datasetMetadataBlocks: MetadataBlock[], collectionId: string): Promise; + createDataset( + newDataset: NewDatasetDTO, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index da73e517..ca352d82 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -23,20 +23,17 @@ export class CreateDataset 
implements UseCase { async execute(newDataset: NewDatasetDTO, collectionId: string = 'root'): Promise { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); - return await this.newDatasetValidator.validate(newDataset, metadataBlocks).then(async () => { - return await this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); - }); + this.newDatasetValidator.validate(newDataset, metadataBlocks); + return this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); } async getNewDatasetMetadataBlocks(newDataset: NewDatasetDTO): Promise { let metadataBlocks: MetadataBlock[] = []; - for (const metadataBlockValue in newDataset.metadataBlockValues) { - metadataBlocks.push( - await this.metadataBlocksRepository.getMetadataBlockByName( - (metadataBlockValue as unknown as NewDatasetMetadataBlockValuesDTO).name, - ), - ); - } + await Promise.all( + newDataset.metadataBlockValues.map(async (metadataBlockValue: NewDatasetMetadataBlockValuesDTO) => { + metadataBlocks.push(await this.metadataBlocksRepository.getMetadataBlockByName(metadataBlockValue.name)); + }), + ); return metadataBlocks; } } diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index d5f63692..6c969610 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -203,7 +203,9 @@ export class NewDatasetValidator implements NewResourceValidator { } private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { - return Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0; + return ( + Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0 + ); } private isValidArrayType( diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts 
b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts index b4ff1f37..5ae104b5 100644 --- a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -71,7 +71,8 @@ export const transformMetadataFieldModelsToRequestPayload = ( ): MetadataFieldRequestPayload[] => { let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; for (const metadataFieldKey of Object.keys(newDatasetMetadataFields)) { - const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValueDTO = newDatasetMetadataFields[metadataFieldKey]; + const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValueDTO = + newDatasetMetadataFields[metadataFieldKey]; metadataFieldsRequestPayload.push({ value: transformMetadataFieldValueToRequestPayload( newDatasetMetadataChildFieldValue, diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts index 6cfd4544..f3f91388 100644 --- a/test/unit/datasets/CreateDataset.test.ts +++ b/test/unit/datasets/CreateDataset.test.ts @@ -5,7 +5,7 @@ import { assert, createSandbox, SinonSandbox } from 'sinon'; import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; import { createNewDatasetDTO, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; import { ResourceValidationError } from '../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; -import { WriteError } from '../../../src'; +import { WriteError, ReadError } from '../../../src'; import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; describe('execute', () => { @@ -17,13 +17,6 @@ describe('execute', () => { sandbox.restore(); }); - function setupMetadataBlocksRepositoryStub(): IMetadataBlocksRepository { - const metadataBlocksRepositoryStub = {}; - const getMetadataBlockByNameStub 
= sandbox.stub().resolves(testMetadataBlocks[0]); - metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; - return metadataBlocksRepositoryStub; - } - test('should return new dataset identifiers when validation is successful and repository call is successful', async () => { const testCreatedDatasetIdentifiers: CreatedDatasetIdentifiers = { persistentId: 'test', @@ -38,12 +31,17 @@ describe('execute', () => { const validateStub = sandbox.stub().resolves(); newDatasetValidatorStub.validate = validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); const actual = await sut.execute(testDataset); assert.match(actual, testCreatedDatasetIdentifiers); + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); @@ -51,22 +49,27 @@ describe('execute', () => { }); test('should throw ResourceValidationError and not call repository when validation is unsuccessful', async () => { - const datasetsRepositoryStub = {}; - const createDatasetStub = sandbox.stub(); - datasetsRepositoryStub.createDataset = createDatasetStub; + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; const newDatasetValidatorStub = {}; const testValidationError = new ResourceValidationError('Test error'); const validateStub = sandbox.stub().throwsException(testValidationError); newDatasetValidatorStub.validate = 
validateStub; - const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorStub); let actualError: ResourceValidationError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testValidationError); + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); - assert.notCalled(createDatasetStub); + assert.notCalled(createDatasetMock); }); test('should throw WriteError when validation is successful and repository raises an error', async () => { @@ -79,14 +82,44 @@ describe('execute', () => { const validateMock = sandbox.stub().resolves(); newDatasetValidatorStub.validate = validateMock; - const sut = new CreateDataset(datasetsRepositoryStub, setupMetadataBlocksRepositoryStub(), newDatasetValidatorStub); - let actualError: ResourceValidationError = undefined; + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); + let actualError: WriteError = undefined; await sut.execute(testDataset).catch((e) => (actualError = e)); assert.match(actualError, testWriteError); + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); assert.calledWithExactly(validateMock, testDataset, testMetadataBlocks); assert.calledWithExactly(createDatasetStub, testDataset, 
testMetadataBlocks, 'root'); assert.callOrder(validateMock, createDatasetStub); }); + + test('should throw ReadError when metadata blocks repository raises an error', async () => { + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorMock = {}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorMock.validate = validateMock; + + const metadataBlocksRepositoryStub = {}; + const testReadError = new ReadError('Test error'); + const getMetadataBlockByNameStub = sandbox.stub().throwsException(testReadError); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorMock); + let actualError: ReadError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testReadError); + + assert.notCalled(validateMock); + assert.notCalled(createDatasetMock); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + }); }); From bf8fc087d9a5ee091f737fb6dc1eea578bb17496 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 Jan 2024 16:00:12 +0000 Subject: [PATCH 36/96] Refactor: metadata field validators --- .../validators/BaseMetadataFieldValidator.ts | 101 +++++++++ .../validators/MetadataFieldValidator.ts | 50 +++++ .../MultipleMetadataFieldValidator.ts | 53 +++++ .../validators/NewDatasetValidator.ts | 207 +----------------- .../SingleMetadataFieldValidator.ts | 30 +++ 5 files changed, 238 insertions(+), 203 deletions(-) create mode 100644 src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts create mode 100644 src/datasets/domain/useCases/validators/MetadataFieldValidator.ts create mode 100644 src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts create mode 100644 
src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts diff --git a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts new file mode 100644 index 00000000..1160b086 --- /dev/null +++ b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts @@ -0,0 +1,101 @@ +import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from './errors/DateFormatFieldError'; +import { NewDatasetMetadataChildFieldValueDTO, NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { FieldValidationError } from './errors/FieldValidationError'; +import { MetadataFieldValidator } from './MetadataFieldValidator'; +import { MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetMetadataFieldAndValueInfo { + metadataFieldInfo: MetadataFieldInfo; + metadataFieldKey: string; + metadataFieldValue: NewDatasetMetadataFieldValueDTO; + metadataBlockName: string; + metadataParentFieldKey?: string; + metadataFieldPosition?: number; +} + +export abstract class BaseMetadataFieldValidator { + abstract validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void; + + protected executeMetadataFieldValidator( + metadataFieldValidator: BaseMetadataFieldValidator, + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + ) { + metadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); + } + + protected validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (metadataFieldInfo.isControlledVocabulary) { + this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.type == 'DATE') { + this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); + 
} + + if (metadataFieldInfo.childMetadataFields != undefined) { + this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); + } + } + + protected validateControlledVocabularyFieldValue( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + ) { + if ( + !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, + ) + ) { + throw new ControlledVocabularyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + protected validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; + if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { + throw new DateFormatFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + protected validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + this.executeMetadataFieldValidator(new MetadataFieldValidator(), { + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as 
NewDatasetMetadataChildFieldValueDTO + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); + } + } + + protected createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + reason: string, + ): FieldValidationError { + return new FieldValidationError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + reason, + ); + } +} diff --git a/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts new file mode 100644 index 00000000..3ee1ed17 --- /dev/null +++ b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts @@ -0,0 +1,50 @@ +import { + BaseMetadataFieldValidator, + NewDatasetMetadataFieldAndValueInfo, +} from './BaseMetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; +import { EmptyFieldError } from './errors/EmptyFieldError'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; + +export class MetadataFieldValidator extends BaseMetadataFieldValidator { + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if ( + metadataFieldValue == undefined || + metadataFieldValue == null || + this.isEmptyString(metadataFieldValue) || + this.isEmptyArray(metadataFieldValue) + ) { + 
if (metadataFieldInfo.isRequired) { + throw new EmptyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } else { + return; + } + } + if (newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.multiple) { + this.executeMetadataFieldValidator( + new MultipleMetadataFieldValidator(), + newDatasetMetadataFieldAndValueInfo, + ); + } else { + this.executeMetadataFieldValidator(new SingleMetadataFieldValidator(), newDatasetMetadataFieldAndValueInfo); + } + } + + private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; + } + + private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return ( + Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0 + ); + } +} diff --git a/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts new file mode 100644 index 00000000..bf5b3b25 --- /dev/null +++ b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts @@ -0,0 +1,53 @@ +import { + BaseMetadataFieldValidator, + NewDatasetMetadataFieldAndValueInfo, +} from './BaseMetadataFieldValidator'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; + +export class MultipleMetadataFieldValidator extends BaseMetadataFieldValidator { + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (!Array.isArray(metadataFieldValue)) { + throw 
this.createGeneralValidationError(newDatasetMetadataFieldAndValueInfo, 'Expecting an array of values.'); + } + if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of child fields, not strings.', + ); + } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of strings, not child fields.', + ); + } else if ( + !this.isValidArrayType(metadataFieldValue, 'object') && + !this.isValidArrayType(metadataFieldValue, 'string') + ) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'The provided array of values is not valid.', + ); + } + + const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValueDTO[]; + fieldValues.forEach((value, metadataFieldPosition) => { + this.validateFieldValue({ + metadataFieldInfo: metadataFieldInfo, + metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldValue: value, + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: metadataFieldPosition, + }); + }); + } + + private isValidArrayType( + metadataFieldValue: Array, + expectedType: 'string' | 'object', + ): boolean { + return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValueDTO) => typeof item === expectedType); + } +} diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 6c969610..3bfe5622 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -1,25 +1,8 @@ -import { - NewDatasetDTO, - 
NewDatasetMetadataFieldValueDTO, - NewDatasetMetadataChildFieldValueDTO, - NewDatasetMetadataBlockValuesDTO, -} from '../../dtos/NewDatasetDTO'; +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../../dtos/NewDatasetDTO'; import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; -import { MetadataFieldInfo, MetadataBlock } from '../../../../metadataBlocks'; +import { MetadataBlock } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; -import { EmptyFieldError } from './errors/EmptyFieldError'; -import { FieldValidationError } from './errors/FieldValidationError'; -import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; -import { DateFormatFieldError } from './errors/DateFormatFieldError'; - -export interface NewDatasetMetadataFieldAndValueInfo { - metadataFieldInfo: MetadataFieldInfo; - metadataFieldKey: string; - metadataFieldValue: NewDatasetMetadataFieldValueDTO; - metadataBlockName: string; - metadataParentFieldKey?: string; - metadataFieldPosition?: number; -} +import { MetadataFieldValidator } from './MetadataFieldValidator'; export class NewDatasetValidator implements NewResourceValidator { async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise { @@ -37,7 +20,7 @@ export class NewDatasetValidator implements NewResourceValidator { (metadataBlock) => metadataBlock.name === metadataBlockName, ); for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { - this.validateMetadataField({ + new MetadataFieldValidator().validate({ metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], metadataFieldKey: metadataFieldKey, metadataFieldValue: metadataBlockValues.fields[metadataFieldKey], @@ -45,186 +28,4 @@ export class NewDatasetValidator implements NewResourceValidator { }); } } - - private 
validateMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { - const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - if ( - metadataFieldValue == undefined || - metadataFieldValue == null || - this.isEmptyString(metadataFieldValue) || - this.isEmptyArray(metadataFieldValue) - ) { - if (metadataFieldInfo.isRequired) { - throw new EmptyFieldError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - ); - } else { - return; - } - } - if (metadataFieldInfo.multiple) { - this.validateMultipleMetadataField(newDatasetMetadataFieldAndValueInfo); - } else { - this.validateSingleMetadataField(newDatasetMetadataFieldAndValueInfo); - } - } - - private validateMultipleMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - if (!Array.isArray(metadataFieldValue)) { - throw this.createGeneralValidationError(newDatasetMetadataFieldAndValueInfo, 'Expecting an array of values.'); - } - if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'Expecting an array of child fields, not strings.', - ); - } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'Expecting an array of strings, not child fields.', - ); - } else if ( - !this.isValidArrayType(metadataFieldValue, 'object') && - 
!this.isValidArrayType(metadataFieldValue, 'string') - ) { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'The provided array of values is not valid.', - ); - } - - const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValueDTO[]; - fieldValues.forEach((value, metadataFieldPosition) => { - this.validateFieldValue({ - metadataFieldInfo: metadataFieldInfo, - metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - metadataFieldValue: value, - metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, - metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - metadataFieldPosition: metadataFieldPosition, - }); - }); - } - - private validateSingleMetadataField(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - if (Array.isArray(metadataFieldValue)) { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'Expecting a single field, not an array.', - ); - } - if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'Expecting a string, not child fields.', - ); - } - if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { - throw this.createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo, - 'Expecting child fields, not a string.', - ); - } - this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); - } - - private validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - if (metadataFieldInfo.isControlledVocabulary) { - 
this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); - } - - if (metadataFieldInfo.type == 'DATE') { - this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); - } - - if (metadataFieldInfo.childMetadataFields != undefined) { - this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); - } - } - - private validateControlledVocabularyFieldValue( - newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, - ) { - if ( - !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, - ) - ) { - throw new ControlledVocabularyFieldError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - ); - } - } - - private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; - if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { - throw new DateFormatFieldError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - ); - } - } - - private validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); - for (const childMetadataFieldKey of childMetadataFieldKeys) { - const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - this.validateMetadataField({ - metadataFieldInfo: 
childMetadataFieldInfo, - metadataFieldKey: childMetadataFieldKey, - metadataFieldValue: ( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO - )[childMetadataFieldKey], - metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, - metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - }); - } - } - - private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { - return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; - } - - private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { - return ( - Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0 - ); - } - - private isValidArrayType( - metadataFieldValue: Array, - expectedType: 'string' | 'object', - ): boolean { - return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValueDTO) => typeof item === expectedType); - } - - private createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, - reason: string, - ): FieldValidationError { - return new FieldValidationError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - reason, - ); - } } diff --git a/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts new file mode 100644 index 00000000..11197607 --- /dev/null +++ b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts @@ -0,0 +1,30 @@ +import { + BaseMetadataFieldValidator, + NewDatasetMetadataFieldAndValueInfo, +} from './BaseMetadataFieldValidator'; + +export class 
SingleMetadataFieldValidator extends BaseMetadataFieldValidator { + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a single field, not an array.', + ); + } + if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a string, not child fields.', + ); + } + if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting child fields, not a string.', + ); + } + this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); + } +} From 2860270c7a8d3502d9fdade91082ff002db3c5a7 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 Jan 2024 16:03:33 +0000 Subject: [PATCH 37/96] Changed: protected methods now private --- .../validators/BaseMetadataFieldValidator.ts | 66 +++++++++---------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts index 1160b086..9a04f0e6 100644 --- a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts +++ b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts @@ -24,6 +24,37 @@ export abstract class BaseMetadataFieldValidator { metadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); } + protected validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = 
newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + this.executeMetadataFieldValidator(new MetadataFieldValidator(), { + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); + } + } + + protected createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + reason: string, + ): FieldValidationError { + return new FieldValidationError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + reason, + ); + } + protected validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; if (metadataFieldInfo.isControlledVocabulary) { @@ -39,7 +70,7 @@ export abstract class BaseMetadataFieldValidator { } } - protected validateControlledVocabularyFieldValue( + private validateControlledVocabularyFieldValue( newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, ) { if ( @@ -56,7 +87,7 @@ export abstract class BaseMetadataFieldValidator { } } - protected validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + 
private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { throw new DateFormatFieldError( @@ -67,35 +98,4 @@ export abstract class BaseMetadataFieldValidator { ); } } - - protected validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); - for (const childMetadataFieldKey of childMetadataFieldKeys) { - const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - this.executeMetadataFieldValidator(new MetadataFieldValidator(), { - metadataFieldInfo: childMetadataFieldInfo, - metadataFieldKey: childMetadataFieldKey, - metadataFieldValue: ( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO - )[childMetadataFieldKey], - metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, - metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - }); - } - } - - protected createGeneralValidationError( - newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, - reason: string, - ): FieldValidationError { - return new FieldValidationError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - reason, - ); - } } From ce8e391fe1050ed15eaf8c336b219808783c3b79 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 22 Jan 2024 16:07:53 +0000 Subject: [PATCH 38/96] Changed: docker env vars to 
use docker.io and unstable --- test/integration/environment/.env | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/environment/.env b/test/integration/environment/.env index 2141e353..80e9a14e 100644 --- a/test/integration/environment/.env +++ b/test/integration/environment/.env @@ -1,6 +1,6 @@ POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -DATAVERSE_IMAGE_REGISTRY=ghcr.io -DATAVERSE_IMAGE_TAG=10216-metadatablocks-payload-extension +DATAVERSE_IMAGE_REGISTRY=docker.io +DATAVERSE_IMAGE_TAG=unstable DATAVERSE_BOOTSTRAP_TIMEOUT=5m From 938ab8fa95b50dfd34248fefd7ddc171f1ff93fa Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 23 Jan 2024 15:50:17 +0000 Subject: [PATCH 39/96] Refactor: replaced inheritance with composition in metadata field validators --- .../validators/BaseMetadataFieldValidator.ts | 74 +------------------ .../validators/MetadataFieldValidator.ts | 19 ++--- .../MultipleMetadataFieldValidator.ts | 12 +-- .../validators/NewDatasetValidator.ts | 6 +- .../SingleMetadataFieldValidator.ts | 73 +++++++++++++++++- src/datasets/index.ts | 14 +++- .../unit/datasets/NewDatasetValidator.test.ts | 21 +++--- 7 files changed, 114 insertions(+), 105 deletions(-) diff --git a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts index 9a04f0e6..717f2b4c 100644 --- a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts +++ b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts @@ -1,8 +1,5 @@ -import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; -import { DateFormatFieldError } from './errors/DateFormatFieldError'; -import { NewDatasetMetadataChildFieldValueDTO, NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; import { FieldValidationError } from 
'./errors/FieldValidationError'; -import { MetadataFieldValidator } from './MetadataFieldValidator'; import { MetadataFieldInfo } from '../../../../metadataBlocks'; export interface NewDatasetMetadataFieldAndValueInfo { @@ -17,31 +14,6 @@ export interface NewDatasetMetadataFieldAndValueInfo { export abstract class BaseMetadataFieldValidator { abstract validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void; - protected executeMetadataFieldValidator( - metadataFieldValidator: BaseMetadataFieldValidator, - newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, - ) { - metadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); - } - - protected validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); - for (const childMetadataFieldKey of childMetadataFieldKeys) { - const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; - this.executeMetadataFieldValidator(new MetadataFieldValidator(), { - metadataFieldInfo: childMetadataFieldInfo, - metadataFieldKey: childMetadataFieldKey, - metadataFieldValue: ( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO - )[childMetadataFieldKey], - metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, - metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - }); - } - } - protected createGeneralValidationError( newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, reason: string, @@ -54,48 +26,4 @@ export abstract class BaseMetadataFieldValidator { reason, ); } - - protected validateFieldValue(newDatasetMetadataFieldAndValueInfo: 
NewDatasetMetadataFieldAndValueInfo) { - const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; - if (metadataFieldInfo.isControlledVocabulary) { - this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); - } - - if (metadataFieldInfo.type == 'DATE') { - this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); - } - - if (metadataFieldInfo.childMetadataFields != undefined) { - this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); - } - } - - private validateControlledVocabularyFieldValue( - newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, - ) { - if ( - !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( - newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, - ) - ) { - throw new ControlledVocabularyFieldError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - ); - } - } - - private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { - const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; - if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { - throw new DateFormatFieldError( - newDatasetMetadataFieldAndValueInfo.metadataFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataBlockName, - newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, - newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, - ); - } - } } diff --git a/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts index 3ee1ed17..35111901 100644 --- a/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts +++ b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts @@ 
-1,13 +1,17 @@ -import { - BaseMetadataFieldValidator, - NewDatasetMetadataFieldAndValueInfo, -} from './BaseMetadataFieldValidator'; +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; import { EmptyFieldError } from './errors/EmptyFieldError'; import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; export class MetadataFieldValidator extends BaseMetadataFieldValidator { + constructor( + private singleMetadataFieldValidator: SingleMetadataFieldValidator, + private multipleMetadataFieldValidator: MultipleMetadataFieldValidator, + ) { + super(); + } + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; @@ -29,12 +33,9 @@ export class MetadataFieldValidator extends BaseMetadataFieldValidator { } } if (newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.multiple) { - this.executeMetadataFieldValidator( - new MultipleMetadataFieldValidator(), - newDatasetMetadataFieldAndValueInfo, - ); + this.multipleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); } else { - this.executeMetadataFieldValidator(new SingleMetadataFieldValidator(), newDatasetMetadataFieldAndValueInfo); + this.singleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); } } diff --git a/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts index bf5b3b25..ae0be7fa 100644 --- a/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts +++ b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts @@ -1,10 
+1,12 @@ -import { - BaseMetadataFieldValidator, - NewDatasetMetadataFieldAndValueInfo, -} from './BaseMetadataFieldValidator'; +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; export class MultipleMetadataFieldValidator extends BaseMetadataFieldValidator { + constructor(private singleMetadataFieldValidator: SingleMetadataFieldValidator) { + super(); + } + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; @@ -33,7 +35,7 @@ export class MultipleMetadataFieldValidator extends BaseMetadataFieldValidator { const fieldValues = metadataFieldValue as NewDatasetMetadataFieldValueDTO[]; fieldValues.forEach((value, metadataFieldPosition) => { - this.validateFieldValue({ + this.singleMetadataFieldValidator.validate({ metadataFieldInfo: metadataFieldInfo, metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, metadataFieldValue: value, diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts index 3bfe5622..7911211a 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetValidator.ts @@ -2,9 +2,11 @@ import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../../dtos/NewD import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; import { MetadataBlock } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; -import { 
MetadataFieldValidator } from './MetadataFieldValidator'; +import { BaseMetadataFieldValidator } from './BaseMetadataFieldValidator'; export class NewDatasetValidator implements NewResourceValidator { + constructor(private metadataFieldValidator: BaseMetadataFieldValidator) {} + async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise { for (const metadataBlockValues of resource.metadataBlockValues) { await this.validateMetadataBlock(metadataBlockValues, metadataBlocks); @@ -20,7 +22,7 @@ export class NewDatasetValidator implements NewResourceValidator { (metadataBlock) => metadataBlock.name === metadataBlockName, ); for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { - new MetadataFieldValidator().validate({ + this.metadataFieldValidator.validate({ metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], metadataFieldKey: metadataFieldKey, metadataFieldValue: metadataBlockValues.fields[metadataFieldKey], diff --git a/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts index 11197607..e338457c 100644 --- a/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts +++ b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts @@ -1,7 +1,9 @@ -import { - BaseMetadataFieldValidator, - NewDatasetMetadataFieldAndValueInfo, -} from './BaseMetadataFieldValidator'; +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from './errors/DateFormatFieldError'; +import { MetadataFieldValidator } from './MetadataFieldValidator'; +import { NewDatasetMetadataChildFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; export class 
SingleMetadataFieldValidator extends BaseMetadataFieldValidator { validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { @@ -27,4 +29,67 @@ export class SingleMetadataFieldValidator extends BaseMetadataFieldValidator { } this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); } + + private validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (metadataFieldInfo.isControlledVocabulary) { + this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.type == 'DATE') { + this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.childMetadataFields != undefined) { + this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); + } + } + + private validateControlledVocabularyFieldValue( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + ) { + if ( + !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, + ) + ) { + throw new ControlledVocabularyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; + if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { + throw new DateFormatFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + 
newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + const metadataFieldValidator = new MetadataFieldValidator(this, new MultipleMetadataFieldValidator(this)); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + metadataFieldValidator.validate({ + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); + } + } } diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 5028cf1f..24b7c9a2 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -10,6 +10,9 @@ import { GetAllDatasetPreviews } from './domain/useCases/GetAllDatasetPreviews'; import { NewDatasetValidator } from './domain/useCases/validators/NewDatasetValidator'; import { MetadataBlocksRepository } from '../metadataBlocks/infra/repositories/MetadataBlocksRepository'; import { CreateDataset } from './domain/useCases/CreateDataset'; +import { MetadataFieldValidator } from './domain/useCases/validators/MetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './domain/useCases/validators/SingleMetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from './domain/useCases/validators/MultipleMetadataFieldValidator'; const 
datasetsRepository = new DatasetsRepository(); @@ -21,7 +24,16 @@ const getPrivateUrlDatasetCitation = new GetPrivateUrlDatasetCitation(datasetsRe const getDatasetUserPermissions = new GetDatasetUserPermissions(datasetsRepository); const getDatasetLocks = new GetDatasetLocks(datasetsRepository); const getAllDatasetPreviews = new GetAllDatasetPreviews(datasetsRepository); -const createDataset = new CreateDataset(datasetsRepository, new MetadataBlocksRepository(), new NewDatasetValidator()); +const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); +const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), +); +const createDataset = new CreateDataset( + datasetsRepository, + new MetadataBlocksRepository(), + new NewDatasetValidator(metadataFieldValidator), +); export { getDatasetSummaryFieldNames, diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetValidator.test.ts index 82ca5274..b3be73ab 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetValidator.test.ts @@ -1,5 +1,5 @@ import { NewDatasetValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetValidator'; -import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { assert } from 'sinon'; import { createNewDatasetDTO, createNewDatasetMetadataBlockModel, @@ -9,14 +9,19 @@ import { fail } from 'assert'; import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/SingleMetadataFieldValidator'; +import { 
MetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/MetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator'; describe('validate', () => { - const sandbox: SinonSandbox = createSandbox(); const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; - afterEach(() => { - sandbox.restore(); - }); + const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); + const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), + ); + const sut = new NewDatasetValidator(metadataFieldValidator); async function runValidateExpectingFieldValidationError( newDataset: NewDatasetDTO, @@ -25,7 +30,6 @@ describe('validate', () => { expectedParentMetadataFieldName?: string, expectedPosition?: number, ): Promise { - const sut = new NewDatasetValidator(); await sut .validate(newDataset, testMetadataBlocks) .then(() => { @@ -43,8 +47,6 @@ describe('validate', () => { test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { const testNewDataset = createNewDatasetDTO(); - const sut = new NewDatasetValidator(); - await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); @@ -159,7 +161,6 @@ describe('validate', () => { }, ]; const testNewDataset = createNewDatasetDTO(undefined, authorFieldValue, undefined); - const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); @@ -174,7 +175,6 @@ describe('validate', () => { test('should not raise a date format validation error when a date field has a valid format', async () => { const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '2020-01-01'); - const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, 
testMetadataBlocks).catch((e) => fail(e)); }); @@ -191,7 +191,6 @@ describe('validate', () => { test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Project Member'); - const sut = new NewDatasetValidator(); await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); }); }); From 3c5f1900b653042a303d9ad6a930253122ef6b80 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 23 Jan 2024 15:54:12 +0000 Subject: [PATCH 40/96] Changed: NewDatasetValidator renamed to NewDatasetResourceValidator --- ...{NewDatasetValidator.ts => NewDatasetResourceValidator.ts} | 2 +- src/datasets/index.ts | 4 ++-- ...tValidator.test.ts => NewDatasetResourceValidator.test.ts} | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) rename src/datasets/domain/useCases/validators/{NewDatasetValidator.ts => NewDatasetResourceValidator.ts} (95%) rename test/unit/datasets/{NewDatasetValidator.test.ts => NewDatasetResourceValidator.test.ts} (97%) diff --git a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts similarity index 95% rename from src/datasets/domain/useCases/validators/NewDatasetValidator.ts rename to src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts index 7911211a..bb07efe6 100644 --- a/src/datasets/domain/useCases/validators/NewDatasetValidator.ts +++ b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts @@ -4,7 +4,7 @@ import { MetadataBlock } from '../../../../metadataBlocks'; import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; import { BaseMetadataFieldValidator } from './BaseMetadataFieldValidator'; -export class NewDatasetValidator implements NewResourceValidator { +export class NewDatasetResourceValidator 
implements NewResourceValidator { constructor(private metadataFieldValidator: BaseMetadataFieldValidator) {} async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise { diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 24b7c9a2..b75784b6 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -7,7 +7,7 @@ import { GetPrivateUrlDatasetCitation } from './domain/useCases/GetPrivateUrlDat import { GetDatasetUserPermissions } from './domain/useCases/GetDatasetUserPermissions'; import { GetDatasetLocks } from './domain/useCases/GetDatasetLocks'; import { GetAllDatasetPreviews } from './domain/useCases/GetAllDatasetPreviews'; -import { NewDatasetValidator } from './domain/useCases/validators/NewDatasetValidator'; +import { NewDatasetResourceValidator } from './domain/useCases/validators/NewDatasetResourceValidator'; import { MetadataBlocksRepository } from '../metadataBlocks/infra/repositories/MetadataBlocksRepository'; import { CreateDataset } from './domain/useCases/CreateDataset'; import { MetadataFieldValidator } from './domain/useCases/validators/MetadataFieldValidator'; @@ -32,7 +32,7 @@ const metadataFieldValidator = new MetadataFieldValidator( const createDataset = new CreateDataset( datasetsRepository, new MetadataBlocksRepository(), - new NewDatasetValidator(metadataFieldValidator), + new NewDatasetResourceValidator(metadataFieldValidator), ); export { diff --git a/test/unit/datasets/NewDatasetValidator.test.ts b/test/unit/datasets/NewDatasetResourceValidator.test.ts similarity index 97% rename from test/unit/datasets/NewDatasetValidator.test.ts rename to test/unit/datasets/NewDatasetResourceValidator.test.ts index b3be73ab..3c0e1c59 100644 --- a/test/unit/datasets/NewDatasetValidator.test.ts +++ b/test/unit/datasets/NewDatasetResourceValidator.test.ts @@ -1,4 +1,4 @@ -import { NewDatasetValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetValidator'; +import { NewDatasetResourceValidator } 
from '../../../src/datasets/domain/useCases/validators/NewDatasetResourceValidator'; import { assert } from 'sinon'; import { createNewDatasetDTO, @@ -21,7 +21,7 @@ describe('validate', () => { new SingleMetadataFieldValidator(), new MultipleMetadataFieldValidator(singleMetadataFieldValidator), ); - const sut = new NewDatasetValidator(metadataFieldValidator); + const sut = new NewDatasetResourceValidator(metadataFieldValidator); async function runValidateExpectingFieldValidationError( newDataset: NewDatasetDTO, From e27af2d4a276d85569b9e9157aef11e83ef6570f Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 08:05:40 +0000 Subject: [PATCH 41/96] Stash: new docs structure WIP --- README.md | 84 +++-------------------------------- docs/installation.md | 13 ++++++ docs/localDevelopment.md | 95 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 113 insertions(+), 79 deletions(-) create mode 100644 docs/installation.md create mode 100644 docs/localDevelopment.md diff --git a/README.md b/README.md index 85e21130..80680ee5 100644 --- a/README.md +++ b/README.md @@ -4,82 +4,8 @@ A JavaScript/TypeScript API wrapper for [Dataverse](http://guides.dataverse.org/en/latest/api/). -## NPM - -A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse - -An unstable 2.x version of this package with breaking changes is under development. Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript - -## Getting Started - -This package is built using `node v19`, so it is recommended to use that version. - -Make sure that you install all the project dependencies: - -`npm install` - -## Build project - -In order to build the project, we need to run the following command: - -`npm run build` - -the build generated will be placed in `dist` folder. 
- -## Tests - -### Run all tests - -`npm run test` - -### Run unit tests - -`npm run test:unit` - -### Run integration tests - -`npm run test:integration` - -#### Configure the integration testing environment - -The integration testing environment is implemented with Test Containers and Docker Compose. The environment uses different environment variables, defined in a .env file, available in the _test/integration/environment_ folder. - -These environment variables can be updated as needed for integration testing. For example, we can specify the Dataverse image registry and tag, to point to the particular Dataverse image to test. - -- To test images generated in Dataverse PRs: Set `ghcr.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and the source branch name of a particular PR as the image tag (DATAVERSE_IMAGE_TAG). - -- To test the Dataverse develop branch: Set `docker.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and `unstable` as the image tag (DATAVERSE_IMAGE_TAG). - -### Run test coverage - -`npm run test:coverage` - -## Format and lint - -### Run formatter - -`npm run format` - -### Run linter - -Running a linting check on the code: - -`npm run lint` - -Fix linting checks on the code: - -`npm run lint:fix` - -## Publishing new version - -Automated publishing of versions could be automated when merging to master. Below are the steps that would be required to publish a new version: - -1. Run tests and checks -2. Build the project -3. Commit changes -4. Upgrade npm version -5. Publish, `npm publish` - -## Contributing - -We love contributors! Please see [CONTRIBUTING.md](CONTRIBUTING.md). 
+- [Installation](./docs/installation.md) +- [Use Cases Documentation](./docs/api.md) +- [Local Development](./docs/localDevelopment.md) +- [Contributing](./CONTRIBUTING.md) +- [License](./LICENSE) diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 00000000..f01cfbc1 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,13 @@ +# Installation + +A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse + +Install the package stable version using npm: + +```bash +npm install js-dataverse +``` + +An unstable 2.x version of this package with breaking changes is under development. + +Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript diff --git a/docs/localDevelopment.md b/docs/localDevelopment.md new file mode 100644 index 00000000..9527a778 --- /dev/null +++ b/docs/localDevelopment.md @@ -0,0 +1,95 @@ +# Local Development + +To set up your local development environment for working on this project, follow these steps: + +## Prerequisites + +### Node.js and npm + +Make sure you have Node.js and npm installed on your machine. + +This package is built using `node v19`, so it is recommended to use that version. + +### Docker and Docker Compose + +We use [Test Containers](https://github.com/testcontainers/testcontainers-node) for running integration tests. + +In our Test Containers setup we use Docker Compose, as our tests involve multiple containers that need to be orchestrated together. + +If you want to run integration tests, you need Docker and Docker Compose installed on your machine. + +## Install Dependencies + +Make sure that you install all the project dependencies: + +```bash +npm install +``` + +## Build + +In order to build the project, we need to run the following command: + +```bash +npm run build +``` + +the build generated will be placed in `dist` folder. 
+ +## Tests + +### Run all tests + +```bash +npm run test +``` + +### Run unit tests + +```bash +npm run test:unit +``` + +### Run integration tests + +```bash +npm run test:integration +``` + +#### Configure the integration testing environment + +The integration testing environment uses different environment variables, defined in a .env file, available in the _test/integration/environment_ folder. + +These environment variables can be updated as needed for integration testing. For example, we can specify the Dataverse image registry and tag, to point to the particular Dataverse image to test. + +- To test images generated in Dataverse PRs: Set `ghcr.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and the source branch name of a particular PR as the image tag (DATAVERSE_IMAGE_TAG). + +- To test the Dataverse develop branch: Set `docker.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and `unstable` as the image tag (DATAVERSE_IMAGE_TAG). + +### Run test coverage + +```bash +npm run test:coverage +``` + +## Format and lint + +### Run formatter + +```bash +npm run format +``` + +### Run linter + +Running a linting check on the code: + +```bash +npm run lint +``` + +Fix linting checks on the code: + +```bash +npm run lint:fix +``` \ No newline at end of file From 6fb2f1b261f308b46a580f01a8dbbd12b2d458d5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 08:48:51 +0000 Subject: [PATCH 42/96] Added: new useCases section and Api Config installation subsection (empty) --- docs/installation.md | 6 ++++++ docs/useCases.md | 27 +++++++++++++++++++++++++++ 2 files changed, 33 insertions(+) create mode 100644 docs/useCases.md diff --git a/docs/installation.md b/docs/installation.md index f01cfbc1..564ed89d 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,5 +1,7 @@ # Installation +## NPM + A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse Install the package stable version 
using npm: @@ -11,3 +13,7 @@ npm install js-dataverse An unstable 2.x version of this package with breaking changes is under development. Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript + +## API Config + +TODO diff --git a/docs/useCases.md b/docs/useCases.md new file mode 100644 index 00000000..23a30118 --- /dev/null +++ b/docs/useCases.md @@ -0,0 +1,27 @@ +# Use Cases + +In the context of Domain-Driven Design (DDD), a use case is a specific way to describe and capture a user's or system's interaction with the domain to achieve a particular goal. + +This package exposes the functionality in the form of use cases, with the main goal that any package consumer can easily identify the desired functionality. + +The different use cases currently available in the package are classified below, according to the subdomains they target: + +## Datasets + +TODO + +## Files + +TODO + +## Metadata Blocks + +TODO + +## Users + +TODO + +## Info + +TODO From 4e63e54e5c8162977577adb2fb4422b86b4161e3 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 09:19:42 +0000 Subject: [PATCH 43/96] Stash: initialization docs WIP --- README.md | 2 +- docs/installation.md | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 80680ee5..97440490 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ A JavaScript/TypeScript API wrapper for [Dataverse](http://guides.dataverse.org/en/latest/api/). 
- [Installation](./docs/installation.md) -- [Use Cases Documentation](./docs/api.md) +- [Use Cases](./docs/useCases.md) - [Local Development](./docs/localDevelopment.md) - [Contributing](./CONTRIBUTING.md) - [License](./LICENSE) diff --git a/docs/installation.md b/docs/installation.md index 564ed89d..00520cd8 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,6 +2,8 @@ ## NPM +### Stable version + A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse Install the package stable version using npm: @@ -10,10 +12,20 @@ Install the package stable version using npm: npm install js-dataverse ``` +### Development versions + An unstable 2.x version of this package with breaking changes is under development. Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript -## API Config +## Initialization + +In order for the package to connect to the Dataverse API, there is an `APIConfig` object that should be initialized to set the preferred authentication mechanism with the associated credentials for connecting to the Dataverse API. + +Currently, the supported authentication mechanisms are: + +- API Key: The recommended mechanism and the original one from the initial package versions. The API Key should correspond to a particular Dataverse user. + +- Session Cookie: This is an experimental feature primarily designed for Dataverse SPA development. It is necessary to enable the corresponding feature flag in the Dataverse installation (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism. 
-TODO +TODO \ No newline at end of file From e79e4d17d27a50edd9429127662a9412afd370a6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 15:25:42 +0000 Subject: [PATCH 44/96] Added: info to installation/initialization section --- docs/installation.md | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 00520cd8..d3177635 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -24,8 +24,36 @@ In order for the package to connect to the Dataverse API, there is an `APIConfig Currently, the supported authentication mechanisms are: -- API Key: The recommended mechanism and the original one from the initial package versions. The API Key should correspond to a particular Dataverse user. +- **API Key**: The recommended authentication mechanism. The API Key should correspond to a particular Dataverse user account. -- Session Cookie: This is an experimental feature primarily designed for Dataverse SPA development. It is necessary to enable the corresponding feature flag in the Dataverse installation (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism. +- **Session Cookie**: This is an experimental feature primarily designed for Dataverse SPA development. It is necessary to enable the corresponding feature flag in the Dataverse installation to use this mechanism (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism and instead use API Key authentication. -TODO \ No newline at end of file +It is recommended to globally initialize the `ApiConfig` object from the consuming application, as the configuration will be read on every API call made by the package's use cases. 
+ +For example, in a React application, we can globally initialize the `ApiConfig` object in the `App` file, like this: + +```typescript +ApiConfig.init(<DATAVERSE_API_BASE_URL>, DataverseApiAuthMechanism.API_KEY, <DATAVERSE_API_USER_API_KEY>) + +function App() { + /* Your App code */ +} + +export default App +```` + +The same example but with example values set: + +```typescript +ApiConfig.init('http://localhost:8000/api/v1', DataverseApiAuthMechanism.API_KEY, 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx') + +function App() { + /* Your App code */ +} + +export default App +```` + +We can initialize the `ApiConfig` object as an unauthenticated user, by setting `undefined` as the API Key value. + +This will allow use cases that do not require authentication to be successfully executed, but those that do require authentication will fail. From 0db5faa5b74611a3da28e776772bbd2d22fc8b69 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 16:09:01 +0000 Subject: [PATCH 45/96] Stash: useCase docs WIP. Added initial docs for GetAllDatasetPreviews use case --- docs/useCases.md | 29 +++++++++++++++++++ .../domain/useCases/GetAllDatasetPreviews.ts | 7 +++++ 2 files changed, 36 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index 23a30118..f7b5d79a 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -8,6 +8,35 @@ The different use cases currently available in the package are classified below, ## Datasets +### Read operations + +#### [GetAllDatasetPreviews](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) + +Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains information for each dataset that the calling user can access in the installation. + +##### Example call: + +````typescript +import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const limit = 10; +const offset = 20; + +getAllDatasetPreviews + .execute(limit, offset) + .then((subset: DatasetPreviewSubset) => { + /* ... */ + }); + +/* ... 
*/ +```` + +Note that `limit` and `offset` are optional parameters for pagination. + +#### [GetDataset](../src/datasets/domain/useCases/GetDataset.ts) + TODO ## Files diff --git a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts index e10cd9c3..66266a9f 100644 --- a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts +++ b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts @@ -9,6 +9,13 @@ export class GetAllDatasetPreviews implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns an instance of DatasetPreviewSubset that contains information for each dataset that the calling user can access in the installation. + * + * @param {number} [limit] - Limit for pagination (optional). + * @param {number} [offset] - Offset for pagination (optional). + * @returns {Promise} + */ async execute(limit?: number, offset?: number): Promise { return await this.datasetsRepository.getAllDatasetPreviews(limit, offset); } From 9d0dbcb3e60f26dc7b80b967bb57ef50b130bf8b Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 24 Jan 2024 16:20:38 +0000 Subject: [PATCH 46/96] Added: GetAllDatasetPreviews doc tweak --- docs/useCases.md | 2 +- src/datasets/domain/useCases/GetAllDatasetPreviews.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index f7b5d79a..89168365 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -12,7 +12,7 @@ The different use cases currently available in the package are classified below, #### [GetAllDatasetPreviews](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) -Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains information for each dataset that the calling user can access in the installation. 
+Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. ##### Example call: diff --git a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts index 66266a9f..baa0c7ea 100644 --- a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts +++ b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts @@ -10,7 +10,7 @@ export class GetAllDatasetPreviews implements UseCase { } /** - * Returns an instance of DatasetPreviewSubset that contains information for each dataset that the calling user can access in the installation. + * Returns an instance of DatasetPreviewSubset that contains reduced information for each dataset that the calling user can access in the installation. * * @param {number} [limit] - Limit for pagination (optional). * @param {number} [offset] - Offset for pagination (optional). 
From 16cdf9a987341b69a15aca57492415c5e6f04f63 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 12:34:50 +0000 Subject: [PATCH 47/96] Added: table of contents to useCases md and structure tweaks --- docs/useCases.md | 47 ++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 5 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 89168365..821f37c1 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -6,11 +6,48 @@ This package exposes the functionality in the form of use cases, with the main g The different use cases currently available in the package are classified below, according to the subdomains they target: +## Table of Contents + +- [Datasets](#Datasets) + - [Datasets read use cases](#datasets-read-use-cases) + - [Get a dataset](#get-a-dataset) + - [List all datasets](#list-all-datasets) +- [Installation](#installation) +- [Usage](#usage) +- [Examples](#examples) +- [Conclusion](#conclusion) + ## Datasets -### Read operations +### Datasets read use cases + +#### Get a dataset + +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the parameters that identify it. + +##### Example call: + +````typescript +import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript' + +/* ... */ -#### [GetAllDatasetPreviews](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) +const datasetId = 'doi:10.77777/FK2/AAAAAA'; +const datasetVersionId = 20; + +getDataset + .execute(datasetId, datasetVersionId) + .then((dataset: Dataset) => { + /* ... */ + }); + +/* ... */ +```` + +*See [use case](../src/datasets/domain/useCases/GetDataset.ts)* definition. + + +#### List all datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. @@ -33,11 +70,11 @@ getAllDatasetPreviews /* ... 
*/ ```` -Note that `limit` and `offset` are optional parameters for pagination. +*See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) definition*. -#### [GetDataset](../src/datasets/domain/useCases/GetDataset.ts) +Note that `limit` and `offset` are optional parameters for pagination. -TODO +The `DatasetPreviewSubset`returned instance contains a property called `totalDatasetCount` which is necessary for pagination. ## Files From a0f4ffd9d5ba280b27af9135012716d7600963da Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 12:38:25 +0000 Subject: [PATCH 48/96] Added: empty TOC sections to useCases.md --- docs/useCases.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 821f37c1..fae3c1eb 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -12,10 +12,10 @@ The different use cases currently available in the package are classified below, - [Datasets read use cases](#datasets-read-use-cases) - [Get a dataset](#get-a-dataset) - [List all datasets](#list-all-datasets) -- [Installation](#installation) -- [Usage](#usage) -- [Examples](#examples) -- [Conclusion](#conclusion) +- [Files](#Files) +- [Metadata Blocks](#metadata-blocks) +- [Users](#Users) +- [Info](#Info) ## Datasets From 167801eeef2f3fc406f96c93934009a379f2930f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 12:42:12 +0000 Subject: [PATCH 49/96] Added: DDD link --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index fae3c1eb..7b4f33b0 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -1,6 +1,6 @@ # Use Cases -In the context of Domain-Driven Design (DDD), a use case is a specific way to describe and capture a user's or system's interaction with the domain to achieve a particular goal. 
+In the context of [Domain-Driven Design (DDD)](https://martinfowler.com/bliki/DomainDrivenDesign.html), a use case is a specific way to describe and capture a user's or system's interaction with the domain to achieve a particular goal. This package exposes the functionality in the form of use cases, with the main goal that any package consumer can easily identify the desired functionality. From e65723e6ffbfc5ff7d8e1d6612a1d83979a83f7f Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 13:03:49 +0000 Subject: [PATCH 50/96] Added: GetDataset use case docs --- docs/useCases.md | 11 +++++++++-- src/datasets/domain/useCases/GetDataset.ts | 8 ++++++++ 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 7b4f33b0..4bbff16c 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -23,7 +23,7 @@ The different use cases currently available in the package are classified below, #### Get a dataset -Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the parameters that identify it. +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the search parameters to identify it. ##### Example call: @@ -33,7 +33,7 @@ import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript' /* ... */ const datasetId = 'doi:10.77777/FK2/AAAAAA'; -const datasetVersionId = 20; +const datasetVersionId = '1.0'; getDataset .execute(datasetId, datasetVersionId) @@ -46,6 +46,13 @@ getDataset *See [use case](../src/datasets/domain/useCases/GetDataset.ts)* definition. +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. 
This parameter is optional and if it is not set, the default value is: `DatasetNotNumberedVersion.LATEST`. + +There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + + #### List all datasets diff --git a/src/datasets/domain/useCases/GetDataset.ts b/src/datasets/domain/useCases/GetDataset.ts index 4cbef29d..d5e2fc0c 100644 --- a/src/datasets/domain/useCases/GetDataset.ts +++ b/src/datasets/domain/useCases/GetDataset.ts @@ -10,6 +10,14 @@ export class GetDataset implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns a Dataset instance, given the search parameters to identify it. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. 
The default value is false + * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From bd407a9242a36a575ef944f106632011a1dcb0f5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 13:05:57 +0000 Subject: [PATCH 51/96] Added: doc tweaks --- docs/useCases.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 4bbff16c..1e392dab 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -23,7 +23,7 @@ The different use cases currently available in the package are classified below, #### Get a dataset -Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the search parameters to identify it. +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the search parameters to identify it. ##### Example call: @@ -48,9 +48,9 @@ getDataset The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. -The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. This parameter is optional and if it is not set, the default value is: `DatasetNotNumberedVersion.LATEST`. +The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, parameter the default value is `DatasetNotNumberedVersion.LATEST`. -There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. 
+There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, parameter the default value is `false`. From 579b98e72a66c13162371008a22067f29cf6e39e Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 26 Jan 2024 13:11:41 +0000 Subject: [PATCH 52/96] Stash: GetDatasetCitation docs WIP --- docs/useCases.md | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 1e392dab..6d3091e3 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -10,8 +10,9 @@ The different use cases currently available in the package are classified below, - [Datasets](#Datasets) - [Datasets read use cases](#datasets-read-use-cases) - - [Get a dataset](#get-a-dataset) - - [List all datasets](#list-all-datasets) + - [Get a Dataset](#get-a-dataset) + - [Get Citation Text from a Dataset](#get-citation-text-from-a-dataset) + - [List all Datasets](#list-all-datasets) - [Files](#Files) - [Metadata Blocks](#metadata-blocks) - [Users](#Users) @@ -19,9 +20,9 @@ The different use cases currently available in the package are classified below, ## Datasets -### Datasets read use cases +### Datasets Read Use Cases -#### Get a dataset +#### Get a Dataset Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the search parameters to identify it. @@ -48,13 +49,19 @@ getDataset The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. -The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, parameter the default value is `DatasetNotNumberedVersion.LATEST`. 
+The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. -There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, parameter the default value is `false`. +There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +#### Get Citation Text from a Dataset +TODO + +##### Example call: + +*See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition*. -#### List all datasets +#### List All Datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. From 291b4a0916f0fbc3af7368fe76863cb016049fb1 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 31 Jan 2024 10:33:20 +0000 Subject: [PATCH 53/96] Added: GetDatasetCitation docs --- docs/useCases.md | 21 ++++++++++++++++++- .../domain/useCases/GetDatasetCitation.ts | 8 +++++++ 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 6d3091e3..ff66d77f 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -55,12 +55,31 @@ There is a third optional parameter called `includeDeaccessioned`, which indicat #### Get Citation Text from a Dataset -TODO +Returns the Dataset citation text. ##### Example call: +````typescript +import { getDatasetCitation } from '@iqss/dataverse-client-javascript' + +/* ... 
*/ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; +const datasetVersionId = '1.0'; + +getDatasetCitation + .execute(datasetId, datasetVersionId) + .then((citationText: string) => { + /* ... */ + }); + +/* ... */ +```` + *See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition*. +There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + #### List All Datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. diff --git a/src/datasets/domain/useCases/GetDatasetCitation.ts b/src/datasets/domain/useCases/GetDatasetCitation.ts index 384a3e6c..73bba66c 100644 --- a/src/datasets/domain/useCases/GetDatasetCitation.ts +++ b/src/datasets/domain/useCases/GetDatasetCitation.ts @@ -9,6 +9,14 @@ export class GetDatasetCitation implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the Dataset citation text. + * + * @param {number} [datasetId] - The dataset identifier. + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. 
The default value is false + * @returns {Promise} + */ async execute( datasetId: number, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From c1039f11ef3acc1103b0aaeee92883381fee71f4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 31 Jan 2024 10:43:56 +0000 Subject: [PATCH 54/96] Added: getDatasetLocks docs --- docs/useCases.md | 35 ++++++++++++++++--- .../domain/useCases/GetDatasetLocks.ts | 6 ++++ 2 files changed, 37 insertions(+), 4 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index ff66d77f..395723f2 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -11,8 +11,9 @@ The different use cases currently available in the package are classified below, - [Datasets](#Datasets) - [Datasets read use cases](#datasets-read-use-cases) - [Get a Dataset](#get-a-dataset) - - [Get Citation Text from a Dataset](#get-citation-text-from-a-dataset) - - [List all Datasets](#list-all-datasets) + - [Get Dataset Citation Text](#get-dataset-citation-text) + - [Get Dataset Locks](#get-dataset-locks) + - [List All Datasets](#list-all-datasets) - [Files](#Files) - [Metadata Blocks](#metadata-blocks) - [Users](#Users) @@ -53,7 +54,7 @@ The `datasetVersionId` parameter can correspond to a numeric version identifier, There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. -#### Get Citation Text from a Dataset +#### Get Dataset Citation Text Returns the Dataset citation text. @@ -64,7 +65,7 @@ import { getDatasetCitation } from '@iqss/dataverse-client-javascript' /* ... */ -const datasetId = 'doi:10.77777/FK2/AAAAAA'; +const datasetId = 2; const datasetVersionId = '1.0'; getDatasetCitation @@ -80,6 +81,32 @@ getDatasetCitation There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. 
If not set, the default value is `false`. +#### Get Dataset Locks + +Returns a [DatasetLock](../src/datasets/domain/models/DatasetLock.ts) array of all locks present in a Dataset. + +##### Example call: + +````typescript +import { getDatasetLocks } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; + +getDatasetLocks + .execute(datasetId) + .then((datasetLocks: DatasetLock[]) => { + /* ... */ + }); + +/* ... */ +```` + +*See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) definition*. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + #### List All Datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. diff --git a/src/datasets/domain/useCases/GetDatasetLocks.ts b/src/datasets/domain/useCases/GetDatasetLocks.ts index f44dccda..b4164567 100644 --- a/src/datasets/domain/useCases/GetDatasetLocks.ts +++ b/src/datasets/domain/useCases/GetDatasetLocks.ts @@ -9,6 +9,12 @@ export class GetDatasetLocks implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns all locks present in a Dataset. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). 
+ * @returns {Promise} + */ async execute(datasetId: number | string): Promise { return await this.datasetsRepository.getDatasetLocks(datasetId); } From 5a76991fdeaa538f2e5c8081ef48e319344b2254 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 31 Jan 2024 10:49:35 +0000 Subject: [PATCH 55/96] Added: GetDatasetSummaryFieldNames docs --- docs/useCases.md | 23 +++++++++++++++++++ .../useCases/GetDatasetSummaryFieldNames.ts | 5 ++++ 2 files changed, 28 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index 395723f2..bcedbf1c 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -13,6 +13,7 @@ The different use cases currently available in the package are classified below, - [Get a Dataset](#get-a-dataset) - [Get Dataset Citation Text](#get-dataset-citation-text) - [Get Dataset Locks](#get-dataset-locks) + - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) - [List All Datasets](#list-all-datasets) - [Files](#Files) - [Metadata Blocks](#metadata-blocks) @@ -107,6 +108,28 @@ getDatasetLocks The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. +#### Get Dataset Summary Field Names + +Returns the names of the dataset summary fields configured in the installation. + +##### Example call: + +````typescript +import { getDatasetSummaryFieldNames } from '@iqss/dataverse-client-javascript' + +/* ... */ + +getDatasetSummaryFieldNames + .execute() + .then((names: string[]) => { + /* ... */ + }); + +/* ... */ +```` + +*See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) definition*. + #### List All Datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. 
diff --git a/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts b/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts index 4041e408..4b67afc3 100644 --- a/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts +++ b/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts @@ -8,6 +8,11 @@ export class GetDatasetSummaryFieldNames implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the names of the dataset summary fields configured in the installation. + * + * @returns {Promise} + */ async execute(): Promise { return await this.datasetsRepository.getDatasetSummaryFieldNames(); } From 2a175bc9de9c91b83642532c5fa3d1f00d4419c8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 31 Jan 2024 10:55:29 +0000 Subject: [PATCH 56/96] Added: GetDatasetUserPermissions docs --- docs/useCases.md | 27 +++++++++++++++++++ .../useCases/GetDatasetUserPermissions.ts | 6 +++++ 2 files changed, 33 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index bcedbf1c..448a91e2 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -14,6 +14,7 @@ The different use cases currently available in the package are classified below, - [Get Dataset Citation Text](#get-dataset-citation-text) - [Get Dataset Locks](#get-dataset-locks) - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) + - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) - [List All Datasets](#list-all-datasets) - [Files](#Files) - [Metadata Blocks](#metadata-blocks) @@ -130,6 +131,32 @@ getDatasetSummaryFieldNames *See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) definition*. +#### Get User Permissions on a Dataset + +Returns an instance of [DatasetUserPermissions](../src/datasets/domain/models/DatasetUserPermissions.ts) that includes the permissions that the calling user has on a particular Dataset. 
+ +##### Example call: + +````typescript +import { getDatasetUserPermissions } from '@iqss/dataverse-client-javascript' + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; + +getDatasetUserPermissions + .execute(datasetId) + .then((permissions: DatasetUserPermissions) => { + /* ... */ + }); + +/* ... */ +```` + +*See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) definition*. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + #### List All Datasets Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. diff --git a/src/datasets/domain/useCases/GetDatasetUserPermissions.ts b/src/datasets/domain/useCases/GetDatasetUserPermissions.ts index 594e0ba6..13055977 100644 --- a/src/datasets/domain/useCases/GetDatasetUserPermissions.ts +++ b/src/datasets/domain/useCases/GetDatasetUserPermissions.ts @@ -9,6 +9,12 @@ export class GetDatasetUserPermissions implements UseCase} + */ async execute(datasetId: number | string): Promise { return await this.datasetsRepository.getDatasetUserPermissions(datasetId); } From ed05893c41124953a1023e73ececd2bd5adaf736 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 31 Jan 2024 15:07:08 +0000 Subject: [PATCH 57/96] Added: Private URL use cases docs --- docs/useCases.md | 154 +++++++++++------- .../domain/useCases/GetPrivateUrlDataset.ts | 6 + .../useCases/GetPrivateUrlDatasetCitation.ts | 6 + 3 files changed, 105 insertions(+), 61 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 448a91e2..ceffb1b1 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -1,6 +1,6 @@ # Use Cases -In the context of [Domain-Driven Design (DDD)](https://martinfowler.com/bliki/DomainDrivenDesign.html), a use case is a specific way to describe and capture a user's or system's interaction with the domain 
to achieve a particular goal. +In the context of [Domain-Driven Design (DDD)](https://martinfowler.com/bliki/DomainDrivenDesign.html), a use case is a specific way to describe and capture a user's or system's interaction with the domain to achieve a particular goal. This package exposes the functionality in the form of use cases, with the main goal that any package consumer can easily identify the desired functionality. @@ -11,7 +11,9 @@ The different use cases currently available in the package are classified below, - [Datasets](#Datasets) - [Datasets read use cases](#datasets-read-use-cases) - [Get a Dataset](#get-a-dataset) + - [Get Dataset By Private URL Token](#get-dataset-by-private-url-token) - [Get Dataset Citation Text](#get-dataset-citation-text) + - [Get Dataset Citation Text By Private URL](#get-dataset-citation-text-by-private-url) - [Get Dataset Locks](#get-dataset-locks) - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) @@ -31,24 +33,22 @@ Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given th ##### Example call: -````typescript -import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript' +```typescript +import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId = 'doi:10.77777/FK2/AAAAAA'; const datasetVersionId = '1.0'; -getDataset - .execute(datasetId, datasetVersionId) - .then((dataset: Dataset) => { - /* ... */ - }); - +getDataset.execute(datasetId, datasetVersionId).then((dataset: Dataset) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetDataset.ts)* definition. +_See [use case](../src/datasets/domain/useCases/GetDataset.ts)_ definition. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. 
@@ -56,56 +56,94 @@ The `datasetVersionId` parameter can correspond to a numeric version identifier, There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +#### Get Dataset By Private URL Token + +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given an associated Private URL Token. + +```typescript +import { getPrivateUrlDataset } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const token = 'a56444bc-7697-4711-8964-e0577f055fd2'; + +getPrivateUrlDataset.execute(token).then((dataset: Dataset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDataset.ts)_ definition. + #### Get Dataset Citation Text Returns the Dataset citation text. ##### Example call: -````typescript -import { getDatasetCitation } from '@iqss/dataverse-client-javascript' +```typescript +import { getDatasetCitation } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId = 2; const datasetVersionId = '1.0'; -getDatasetCitation - .execute(datasetId, datasetVersionId) - .then((citationText: string) => { - /* ... */ - }); - +getDatasetCitation.execute(datasetId, datasetVersionId).then((citationText: string) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition*. +_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition_. There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +#### Get Dataset Citation Text By Private URL + +Returns the Dataset citation text, given an associated Private URL Token. 
+ +##### Example call: + +```typescript +import { getPrivateUrlDatasetCitation } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const token = 'a56444bc-7697-4711-8964-e0577f055fd2'; + +getPrivateUrlDatasetCitation.execute(token).then((citationText: string) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) definition_. + #### Get Dataset Locks Returns a [DatasetLock](../src/datasets/domain/models/DatasetLock.ts) array of all locks present in a Dataset. ##### Example call: -````typescript -import { getDatasetLocks } from '@iqss/dataverse-client-javascript' +```typescript +import { getDatasetLocks } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId = 'doi:10.77777/FK2/AAAAAA'; -getDatasetLocks - .execute(datasetId) - .then((datasetLocks: DatasetLock[]) => { - /* ... */ - }); - +getDatasetLocks.execute(datasetId).then((datasetLocks: DatasetLock[]) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) definition*. +_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) definition_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -115,21 +153,19 @@ Returns the names of the dataset summary fields configured in the installation. ##### Example call: -````typescript -import { getDatasetSummaryFieldNames } from '@iqss/dataverse-client-javascript' +```typescript +import { getDatasetSummaryFieldNames } from '@iqss/dataverse-client-javascript'; /* ... */ -getDatasetSummaryFieldNames - .execute() - .then((names: string[]) => { - /* ... */ - }); - +getDatasetSummaryFieldNames.execute().then((names: string[]) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) definition*. +_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) definition_. 
#### Get User Permissions on a Dataset @@ -137,23 +173,21 @@ Returns an instance of [DatasetUserPermissions](../src/datasets/domain/models/Da ##### Example call: -````typescript -import { getDatasetUserPermissions } from '@iqss/dataverse-client-javascript' +```typescript +import { getDatasetUserPermissions } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId = 'doi:10.77777/FK2/AAAAAA'; -getDatasetUserPermissions - .execute(datasetId) - .then((permissions: DatasetUserPermissions) => { - /* ... */ - }); - +getDatasetUserPermissions.execute(datasetId).then((permissions: DatasetUserPermissions) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) definition*. +_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) definition_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -163,24 +197,22 @@ Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/Data ##### Example call: -````typescript -import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript' +```typescript +import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript'; /* ... */ const limit = 10; const offset = 20; -getAllDatasetPreviews - .execute(limit, offset) - .then((subset: DatasetPreviewSubset) => { - /* ... */ - }); - +getAllDatasetPreviews.execute(limit, offset).then((subset: DatasetPreviewSubset) => { + /* ... */ +}); + /* ... */ -```` +``` -*See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) definition*. +_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) definition_. Note that `limit` and `offset` are optional parameters for pagination. 
diff --git a/src/datasets/domain/useCases/GetPrivateUrlDataset.ts b/src/datasets/domain/useCases/GetPrivateUrlDataset.ts index 3580632c..2e05906d 100644 --- a/src/datasets/domain/useCases/GetPrivateUrlDataset.ts +++ b/src/datasets/domain/useCases/GetPrivateUrlDataset.ts @@ -9,6 +9,12 @@ export class GetPrivateUrlDataset implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns a Dataset instance, given an associated Private URL Token. + * + * @param {string} [token] - A Private URL token. + * @returns {Promise} + */ async execute(token: string): Promise { return await this.datasetsRepository.getPrivateUrlDataset(token); } diff --git a/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts b/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts index 7ff06f47..508376e8 100644 --- a/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts +++ b/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts @@ -8,6 +8,12 @@ export class GetPrivateUrlDatasetCitation implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the Dataset citation text, given an associated Private URL Token. + * + * @param {string} [token] - A Private URL token. 
+ * @returns {Promise} + */ async execute(token: string): Promise { return await this.datasetsRepository.getPrivateUrlDatasetCitation(token); } From 54d92f0e914c588d4c5af92dda4e862ace90d157 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 1 Feb 2024 12:44:31 +0000 Subject: [PATCH 58/96] Added: GetDatasetFiles docs --- docs/useCases.md | 61 +++++++++++++++++++- src/files/domain/useCases/GetDatasetFiles.ts | 12 +++- 2 files changed, 71 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index ceffb1b1..124b2b48 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -19,6 +19,8 @@ The different use cases currently available in the package are classified below, - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) - [List All Datasets](#list-all-datasets) - [Files](#Files) + - [Files read use cases](#files-read-use-cases) + - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) - [Users](#Users) - [Info](#Info) @@ -220,7 +222,64 @@ The `DatasetPreviewSubset`returned instance contains a property called `totalDat ## Files -TODO +### Files read use cases + +#### List Files in a Dataset + +Returns an instance of [FilesSubset](../src/files/domain/models/FilesSubset.ts), which contains the files from the requested Dataset and page (if pagination parameters are set). + +##### Example call: + +```typescript +import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 2; +const datasetVersionId = '1.0'; + +getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) definition_. + +This use case supports the following optional parameters depending on the search goals: + +- **includeDeaccessioned**: (boolean) Indicates whether to consider deaccessioned versions or not in the dataset search. 
If not set, the default value is `false`. +- **limit**: (number) Limit for pagination. +- **offset**: (number) Offset for pagination. +- **fileSearchCriteria**: ([FileSearchCriteria](../src/files/domain/models/FileCriteria.ts)) Supports filtering the files by different file properties. +- **fileOrderCriteria**: ([FileOrderCriteria](../src/files/domain/models/FileCriteria.ts)) Supports ordering the results according to different criteria. If not set, the default value is `FileOrderCriteria.NAME_AZ`. + +##### Example call using optional parameters: + +```typescript +import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const includeDeaccessioned: boolean = true; +const limit: number = 10; +const offset: number = 20; +const searchCriteria: FileSearchCriteria = { + searchText: 'file title', +}; +const orderCriteria: FileOrderCriteria = FileOrderCriteria.NEWEST; + +getDatasetFiles + .execute(datasetId, datasetVersionId, includeDeaccessioned, limit, offset, searchCriteria, orderCriteria) + .then((subset: FilesSubset) => { + /* ... */ + }); + +/* ... 
*/ +``` ## Metadata Blocks diff --git a/src/files/domain/useCases/GetDatasetFiles.ts index 9099f9a3..d533f815 100644 --- a/src/files/domain/useCases/GetDatasetFiles.ts +++ b/src/files/domain/useCases/GetDatasetFiles.ts @@ -1,6 +1,6 @@ import { UseCase } from '../../../core/domain/useCases/UseCase'; import { IFilesRepository } from '../repositories/IFilesRepository'; -import { FilesSubset } from "../models/FilesSubset"; +import { FilesSubset } from '../models/FilesSubset'; import { FileSearchCriteria, FileOrderCriteria } from '../models/FileCriteria'; import { DatasetNotNumberedVersion } from '../../../datasets'; @@ -11,6 +11,16 @@ export class GetDatasetFiles implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns an instance of FilesSubset, which contains the files from the requested Dataset and page (if pagination parameters are set). + * + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. + * @param {number} [limit] - Limit for pagination (optional). + * @param {number} [offset] - Offset for pagination (optional). + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files by different file properties (optional). + * @param {FileOrderCriteria} [fileOrderCriteria=FileOrderCriteria.NAME_AZ] - Supports ordering the results according to different criteria. If not set, the default value is FileOrderCriteria.NAME_AZ. 
+ * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From 71cc2f0ef9de7c3a67bcb25a04810c8aa2829454 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 1 Feb 2024 12:49:05 +0000 Subject: [PATCH 59/96] Fixed: usecase doc title --- docs/useCases.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 124b2b48..b73badeb 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -13,7 +13,7 @@ The different use cases currently available in the package are classified below, - [Get a Dataset](#get-a-dataset) - [Get Dataset By Private URL Token](#get-dataset-by-private-url-token) - [Get Dataset Citation Text](#get-dataset-citation-text) - - [Get Dataset Citation Text By Private URL](#get-dataset-citation-text-by-private-url) + - [Get Dataset Citation Text By Private URL Token](#get-dataset-citation-text-by-private-url-token) - [Get Dataset Locks](#get-dataset-locks) - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) @@ -103,7 +103,7 @@ _See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definitio There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. -#### Get Dataset Citation Text By Private URL +#### Get Dataset Citation Text By Private URL Token Returns the Dataset citation text, given an associated Private URL Token. 
From 4f19efd234061c0a9184e9f80df9cd544aca1f11 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 1 Feb 2024 13:21:31 +0000 Subject: [PATCH 60/96] Added: GetDatasetFileCounts docs --- docs/useCases.md | 33 +++++++++++++++++++ .../domain/useCases/GetDatasetFileCounts.ts | 9 +++++ src/files/domain/useCases/GetDatasetFiles.ts | 2 ++ 3 files changed, 44 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index b73badeb..26fd96d9 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -20,6 +20,7 @@ The different use cases currently available in the package are classified below, - [List All Datasets](#list-all-datasets) - [Files](#Files) - [Files read use cases](#files-read-use-cases) + - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) - [Users](#Users) @@ -281,6 +282,38 @@ getDatasetFiles /* ... */ ``` +### Get File Counts in a Dataset + +Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: + +- **Per content type** +- **Per category name** +- **Per tabular tag name** +- **Per access status** (Possible values: *Public*, *Restricted*, *EmbargoedThenRestricted*, *EmbargoedThenPublic*) + +##### Example call: + +```typescript +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 2; +const datasetVersionId = '1.0'; + +getDatasetFileCounts.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) definition_. + +There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. 
+ +A fourth optional parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) parameter to retrieve counts only for files that match the specified criteria. + ## Metadata Blocks TODO diff --git a/src/files/domain/useCases/GetDatasetFileCounts.ts b/src/files/domain/useCases/GetDatasetFileCounts.ts index 2ca43c03..6e69b112 100644 --- a/src/files/domain/useCases/GetDatasetFileCounts.ts +++ b/src/files/domain/useCases/GetDatasetFileCounts.ts @@ -11,6 +11,15 @@ export class GetDatasetFileCounts implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns an instance of FileCounts, containing the requested Dataset total file count, as well as file counts for different file properties. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files by different file properties (optional). 
+ * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/files/domain/useCases/GetDatasetFiles.ts b/src/files/domain/useCases/GetDatasetFiles.ts index d533f815..9162a910 100644 --- a/src/files/domain/useCases/GetDatasetFiles.ts +++ b/src/files/domain/useCases/GetDatasetFiles.ts @@ -14,6 +14,8 @@ export class GetDatasetFiles implements UseCase { /** * Returns an instance of FilesSubset, which contains the files from the requested Dataset and page (if pagination parameters are set). * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. * @param {number} [limit] - Limit for pagination (optional). * @param {number} [offset] - Offset for pagination (optional). 
From 4714b74a52019a9a0edf0e4a6d4284140254de11 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 11:10:40 +0000 Subject: [PATCH 61/96] Added: doc tweaks for getDatasetFileCounts use case --- docs/useCases.md | 27 +++++++++++++++++++++++++-- 1 file changed, 25 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 26fd96d9..9345ba12 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -289,7 +289,7 @@ Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), c - **Per content type** - **Per category name** - **Per tabular tag name** -- **Per access status** (Possible values: *Public*, *Restricted*, *EmbargoedThenRestricted*, *EmbargoedThenPublic*) +- **Per access status** (Possible values: _Public_, _Restricted_, _EmbargoedThenRestricted_, _EmbargoedThenPublic_) ##### Example call: @@ -301,7 +301,7 @@ import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; const datasetId = 2; const datasetVersionId = '1.0'; -getDatasetFileCounts.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => { +getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: FileCounts) => { /* ... */ }); @@ -314,6 +314,29 @@ There is a third optional parameter called `includeDeaccessioned`, which indicat A fourth optional parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) parameter to retrieve counts only for files that match the specified criteria. +##### Example call using optional parameters: + +```typescript +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const includeDeaccessioned: boolean = true; +const searchCriteria: FileSearchCriteria = { + categoryName: 'Physics', +}; + +getDatasetFileCounts + .execute(datasetId, datasetVersionId, includeDeaccessioned, searchCriteria) + .then((fileCounts: FileCounts) => { + /* ... */ + }); + +/* ... */ +``` + ## Metadata Blocks TODO From b419cd4726cc2d445bd335a2148a3a85a59bc0ea Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:07:40 +0000 Subject: [PATCH 62/96] Added: getDatasetFilesTotalDownloadSize docs --- docs/useCases.md | 66 +++++++++++++++++-- .../GetDatasetFilesTotalDownloadSize.ts | 10 +++ 2 files changed, 71 insertions(+), 5 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 9345ba12..c7ffb5fb 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -21,6 +21,7 @@ The different use cases currently available in the package are classified below, - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) + - [Get the size of Downloading all the files of a Dataset Version](#get-the-size-of-downloading-all-the-files-of-a-dataset-version) - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) - [Users](#Users) @@ -57,7 +58,7 @@ The `datasetId` parameter can be a string, for persistent identifiers, or a numb The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. -There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. 
+There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. #### Get Dataset By Private URL Token @@ -102,7 +103,7 @@ getDatasetCitation.execute(datasetId, datasetVersionId).then((citationText: stri _See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition_. -There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. #### Get Dataset Citation Text By Private URL Token @@ -310,9 +311,9 @@ getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: File _See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) definition_. -There is a third optional parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. -A fourth optional parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) parameter to retrieve counts only for files that match the specified criteria. +An optional fourth parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to retrieve counts only for files that match the specified criteria. 
##### Example call using optional parameters: @@ -325,7 +326,7 @@ const datasetId: number = 2; const datasetVersionId: string = '1.0'; const includeDeaccessioned: boolean = true; const searchCriteria: FileSearchCriteria = { - categoryName: 'Physics', + categoryName: 'physics', }; getDatasetFileCounts @@ -337,6 +338,61 @@ getDatasetFileCounts /* ... */ ``` +### Get the size of Downloading all the files of a Dataset Version + +Returns the combined size in bytes of all the files available for download from a particular Dataset. + +##### Example call: + +```typescript +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; + +getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size: number) => { + /* ... */ +}); + +/* ... */ +``` + +There is a third optional parameter called `fileDownloadSizeMode` which receives an enum type of [FileDownloadSizeMode](../src/files/domain/models/FileDownloadSizeMode.ts), and applies a filter criteria to the operation. This parameter supports the following values: + +- `FileDownloadSizeMode.ALL` (Default): Includes both archival and original sizes for tabular files +- `FileDownloadSizeMode.ARCHIVAL`: Includes only the archival size for tabular files +- `FileDownloadSizeMode.ORIGINAL`: Includes only the original size for tabular files + +An optional fourth parameter called `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to only consider files that match the specified criteria. + +An optional fifth parameter called `includeDeaccessioned` indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +##### Example call using optional parameters: + +```typescript +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const fileDownloadSizeMode: FileDownloadSizeMode = FileDownloadSizeMode.ARCHIVAL; +const fileSearchCriteria: FileDownloadSizeMode = { + categoryName: 'physics', +}; +const includeDeaccessioned: boolean = true; + +getDatasetFilesTotalDownloadSize + .execute(datasetId, datasetVersionId, fileDownloadSizeMode, fileSearchCriteria, includeDeaccessioned) + .then((size: number) => { + /* ... */ + }); + +/* ... */ +``` + ## Metadata Blocks TODO diff --git a/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts b/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts index 8fdbaa6a..c6a32c93 100644 --- a/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts +++ b/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts @@ -11,6 +11,16 @@ export class GetDatasetFilesTotalDownloadSize implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns the combined size in bytes of all the files available for download from a particular Dataset. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {FileDownloadSizeMode} [fileDownloadSizeMode=FileDownloadSizeMode.ALL] - Applies a filter mode to the operation to consider only archival sizes, original or both (all). The default value is FileDownloadSizeMode.ALL. + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files to obtain their combined size by different file properties (optional). 
+ * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. + * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From b1886fe21432e99d67f4649e9844a1922d1b76bd Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:23:19 +0000 Subject: [PATCH 63/96] Added: GetFileDownloadCount docs and general doc structure tweaks --- docs/useCases.md | 163 +++++++++++------- .../domain/useCases/GetFileDownloadCount.ts | 6 + 2 files changed, 105 insertions(+), 64 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index c7ffb5fb..2b94727a 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -21,6 +21,7 @@ The different use cases currently available in the package are classified below, - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) + - [Get File Download Count](#get-file-download-count) - [Get the size of Downloading all the files of a Dataset Version](#get-the-size-of-downloading-all-the-files-of-a-dataset-version) - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) @@ -101,7 +102,9 @@ getDatasetCitation.execute(datasetId, datasetVersionId).then((citationText: stri /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) implementation. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. 
@@ -125,7 +128,7 @@ getPrivateUrlDatasetCitation.execute(token).then((citationText: string) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) implementation. #### Get Dataset Locks @@ -147,7 +150,7 @@ getDatasetLocks.execute(datasetId).then((datasetLocks: DatasetLock[]) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) implementation. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -169,7 +172,7 @@ getDatasetSummaryFieldNames.execute().then((names: string[]) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) implementation. #### Get User Permissions on a Dataset @@ -191,7 +194,7 @@ getDatasetUserPermissions.execute(datasetId).then((permissions: DatasetUserPermi /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) implementation. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -216,7 +219,7 @@ getAllDatasetPreviews.execute(limit, offset).then((subset: DatasetPreviewSubset) /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) definition_. +_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) implementation. Note that `limit` and `offset` are optional parameters for pagination. 
@@ -226,167 +229,199 @@ The `DatasetPreviewSubset`returned instance contains a property called `totalDat ### Files read use cases -#### List Files in a Dataset +#### Get File Counts in a Dataset -Returns an instance of [FilesSubset](../src/files/domain/models/FilesSubset.ts), which contains the files from the requested Dataset and page (if pagination parameters are set). +Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: + +- **Per content type** +- **Per category name** +- **Per tabular tag name** +- **Per access status** (Possible values: _Public_, _Restricted_, _EmbargoedThenRestricted_, _EmbargoedThenPublic_) ##### Example call: ```typescript -import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId = 2; const datasetVersionId = '1.0'; -getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => { +getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: FileCounts) => { /* ... */ }); /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) definition_. +_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) implementation. -This use case supports the following optional parameters depending on the search goals: +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. -- **includeDeaccessioned**: (boolean) Indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. -- **limit**: (number) Limit for pagination. -- **offset**: (number) Offset for pagination. -- **fileSearchCriteria**: ([FileSearchCriteria](../src/files/domain/models/FileCriteria.ts)) Supports filtering the files by different file properties. 
-- **fileOrderCriteria**: ([FileOrderCriteria](../src/files/domain/models/FileCriteria.ts)) Supports ordering the results according to different criteria. If not set, the defalt value is `FileOrderCriteria.NAME_AZ`. +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +An optional fourth parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to retrieve counts only for files that match the specified criteria. ##### Example call using optional parameters: ```typescript -import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; /* ... */ const datasetId: number = 2; const datasetVersionId: string = '1.0'; const includeDeaccessioned: boolean = true; -const limit: number = 10; -const offset: number = 20; const searchCriteria: FileSearchCriteria = { - searchText: 'file title', + categoryName: 'physics', }; -const orderCriteria: FileOrderCriteria = FileOrderCriteria.NEWEST; -getDatasetFiles - .execute(datasetId, datasetVersionId, includeDeaccessioned, limit, offset, searchCriteria, orderCriteria) - .then((subset: FilesSubset) => { +getDatasetFileCounts + .execute(datasetId, datasetVersionId, includeDeaccessioned, searchCriteria) + .then((fileCounts: FileCounts) => { /* ... */ }); /* ... */ ``` -### Get File Counts in a Dataset +#### Get File Download Count -Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: +Provides the download count for a particular File. 
-- **Per content type** -- **Per category name** -- **Per tabular tag name** -- **Per access status** (Possible values: _Public_, _Restricted_, _EmbargoedThenRestricted_, _EmbargoedThenPublic_) +##### Example call: + +```typescript +import { getFileDownloadCount } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId: number = 2; + +getFileDownloadCount.execute(fileId).then((count: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileDownloadCount.ts) implementation. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### Get the size of Downloading all the files of a Dataset Version + +Returns the combined size in bytes of all the files available for download from a particular Dataset. ##### Example call: ```typescript -import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; /* ... */ -const datasetId = 2; -const datasetVersionId = '1.0'; +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; -getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: FileCounts) => { +getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size: number) => { /* ... */ }); /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) definition_. +_See [use case](../src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts) implementation. -There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. 
-An optional fourth parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to retrieve counts only for files that match the specified criteria. +There is a third optional parameter called `fileDownloadSizeMode` which receives an enum type of [FileDownloadSizeMode](../src/files/domain/models/FileDownloadSizeMode.ts), and applies a filter criteria to the operation. This parameter supports the following values: + +- `FileDownloadSizeMode.ALL` (Default): Includes both archival and original sizes for tabular files +- `FileDownloadSizeMode.ARCHIVAL`: Includes only the archival size for tabular files +- `FileDownloadSizeMode.ORIGINAL`: Includes only the original size for tabular files + +An optional fourth parameter called `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to only consider files that match the specified criteria. + +An optional fifth parameter called `includeDeaccessioned` indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. ##### Example call using optional parameters: ```typescript -import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; /* ... 
*/

const datasetId: number = 2;
const datasetVersionId: string = '1.0';
-const includeDeaccessioned: boolean = true;
-const searchCriteria: FileSearchCriteria = {
+const mode: FileDownloadSizeMode = FileDownloadSizeMode.ARCHIVAL;
+const searchCriteria: FileSearchCriteria = {
  categoryName: 'physics',
};
+const includeDeaccessioned: boolean = true;

-getDatasetFileCounts
-  .execute(datasetId, datasetVersionId, includeDeaccessioned, searchCriteria)
-  .then((fileCounts: FileCounts) => {
+getDatasetFilesTotalDownloadSize
+  .execute(datasetId, datasetVersionId, mode, searchCriteria, includeDeaccessioned)
+  .then((size: number) => {
    /* ... */
  });

/* ... */
```

-### Get the size of Downloading all the files of a Dataset Version
+#### List Files in a Dataset

-Returns the combined size in bytes of all the files available for download from a particular Dataset.
+Returns an instance of [FilesSubset](../src/files/domain/models/FilesSubset.ts), which contains the files from the requested Dataset and page (if pagination parameters are set).

##### Example call:

```typescript
-import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript';
+import { getDatasetFiles } from '@iqss/dataverse-client-javascript';

/* ... */

-const datasetId: number = 2;
-const datasetVersionId: string = '1.0';
+const datasetId = 2;
+const datasetVersionId = '1.0';

-getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size: number) => {
+getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => {
  /* ... */
});

/* ... */
```

-There is a third optional parameter called `fileDownloadSizeMode` which receives an enum type of [FileDownloadSizeMode](../src/files/domain/models/FileDownloadSizeMode.ts), and applies a filter criteria to the operation. This parameter supports the following values:
+_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) implementation.

-- `FileDownloadSizeMode.ALL` (Default): Includes both archival and original sizes for tabular files
+The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers.
-- `FileDownloadSizeMode.ARCHIVAL`: Includes only the archival size for tabular files
-- `FileDownloadSizeMode.ORIGINAL`: Includes only the original size for tabular files
+This use case supports the following optional parameters depending on the search goals:
-An optional fourth parameter called `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to only consider files that match the specified criteria.
+- **includeDeaccessioned**: (boolean) Indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`.
-An optional fifth parameter called `includeDeaccessioned` indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`.
+- **limit**: (number) Limit for pagination.
+- **offset**: (number) Offset for pagination.
+- **fileSearchCriteria**: ([FileSearchCriteria](../src/files/domain/models/FileCriteria.ts)) Supports filtering the files by different file properties.
+- **fileOrderCriteria**: ([FileOrderCriteria](../src/files/domain/models/FileCriteria.ts)) Supports ordering the results according to different criteria. If not set, the default value is `FileOrderCriteria.NAME_AZ`.

##### Example call using optional parameters:

```typescript
-import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript';
+import { getDatasetFiles } from '@iqss/dataverse-client-javascript';

/* ...
*/ const datasetId: number = 2; const datasetVersionId: string = '1.0'; -const fileDownloadSizeMode: FileDownloadSizeMode = FileDownloadSizeMode.ARCHIVAL; -const fileSearchCriteria: FileDownloadSizeMode = { - categoryName: 'physics', -}; const includeDeaccessioned: boolean = true; +const limit: number = 10; +const offset: number = 20; +const searchCriteria: FileSearchCriteria = { + searchText: 'file title', +}; +const orderCriteria: FileOrderCriteria = FileOrderCriteria.NEWEST; -getDatasetFilesTotalDownloadSize - .execute(datasetId, datasetVersionId, fileDownloadSizeMode, fileSearchCriteria, includeDeaccessioned) - .then((size: number) => { +getDatasetFiles + .execute(datasetId, datasetVersionId, includeDeaccessioned, limit, offset, searchCriteria, orderCriteria) + .then((subset: FilesSubset) => { /* ... */ }); diff --git a/src/files/domain/useCases/GetFileDownloadCount.ts b/src/files/domain/useCases/GetFileDownloadCount.ts index 4d2b41eb..e527e1ef 100644 --- a/src/files/domain/useCases/GetFileDownloadCount.ts +++ b/src/files/domain/useCases/GetFileDownloadCount.ts @@ -8,6 +8,12 @@ export class GetFileDownloadCount implements UseCase { this.filesRepository = filesRepository; } + /** + * Provides the download count for a particular File. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). 
+ * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileDownloadCount(fileId); } From 4c931767eb28ed99ffaa2e580d65fe4403cfc03b Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:25:07 +0000 Subject: [PATCH 64/96] Fixed: docs structure --- docs/useCases.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 2b94727a..2149e35c 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -102,7 +102,7 @@ getDatasetCitation.execute(datasetId, datasetVersionId).then((citationText: stri /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -128,7 +128,7 @@ getPrivateUrlDatasetCitation.execute(token).then((citationText: string) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) implementation_. #### Get Dataset Locks @@ -150,7 +150,7 @@ getDatasetLocks.execute(datasetId).then((datasetLocks: DatasetLock[]) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -172,7 +172,7 @@ getDatasetSummaryFieldNames.execute().then((names: string[]) => { /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) implementation_. 
#### Get User Permissions on a Dataset @@ -194,7 +194,7 @@ getDatasetUserPermissions.execute(datasetId).then((permissions: DatasetUserPermi /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -219,7 +219,7 @@ getAllDatasetPreviews.execute(limit, offset).then((subset: DatasetPreviewSubset) /* ... */ ``` -_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) implementation. +_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) implementation_. Note that `limit` and `offset` are optional parameters for pagination. @@ -255,7 +255,7 @@ getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: File /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) implementation. +_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -306,7 +306,7 @@ getFileDownloadCount.execute(fileId).then((count: number) => { /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetFileDownloadCount.ts) implementation. +_See [use case](../src/files/domain/useCases/GetFileDownloadCount.ts) implementation_. The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -331,7 +331,7 @@ getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts) implementation. +_See [use case](../src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts) implementation_. 
The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. @@ -390,7 +390,7 @@ getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) /* ... */ ``` -_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) implementation. +_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. From 7e9a38414812517a4ac7e1c6767815e967f7ee41 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:33:48 +0000 Subject: [PATCH 65/96] Added: getFileDataTables docs --- docs/useCases.md | 25 +++++++++++++++++++ .../domain/useCases/GetFileDataTables.ts | 6 +++++ 2 files changed, 31 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index 2149e35c..cac3ca1d 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -21,6 +21,7 @@ The different use cases currently available in the package are classified below, - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) + - [Get File Data Tables](#get-file-data-tables) - [Get File Download Count](#get-file-download-count) - [Get the size of Downloading all the files of a Dataset Version](#get-the-size-of-downloading-all-the-files-of-a-dataset-version) - [List Files in a Dataset](#list-files-in-a-dataset) @@ -286,6 +287,30 @@ getDatasetFileCounts /* ... */ ``` +#### Get File Data Tables + +This use case is oriented toward tabular files and provides an array of [FileDataTable](../src/files/domain/models/FileDataTable.ts) objects for an existing tabular file. + +##### Example call: + +```typescript +import { getFileDataTables } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 2; + +getFileDataTables.execute(fileId).then((dataTables: FileDataTable[]) => { + /* ... */ +}); + +/* ... 
*/ +``` + +_See [use case](../src/files/domain/useCases/GetFileDataTables.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + #### Get File Download Count Provides the download count for a particular File. diff --git a/src/files/domain/useCases/GetFileDataTables.ts b/src/files/domain/useCases/GetFileDataTables.ts index d8a65891..07ffdeda 100644 --- a/src/files/domain/useCases/GetFileDataTables.ts +++ b/src/files/domain/useCases/GetFileDataTables.ts @@ -9,6 +9,12 @@ export class GetFileDataTables implements UseCase { this.filesRepository = filesRepository; } + /** + * This use case is oriented toward tabular files and provides an array of FileDataTable objects for an existing tabular file. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileDataTables(fileId); } From bc65e6e8cc6db2c8161e3bda5c8238f63a7fa3b8 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:41:04 +0000 Subject: [PATCH 66/96] Added: GetFileUserPermissions docs --- docs/useCases.md | 31 +++++++++++++++++++ .../domain/useCases/GetFileUserPermissions.ts | 6 ++++ 2 files changed, 37 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index cac3ca1d..dbdcd1f3 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -24,6 +24,7 @@ The different use cases currently available in the package are classified below, - [Get File Data Tables](#get-file-data-tables) - [Get File Download Count](#get-file-download-count) - [Get the size of Downloading all the files of a Dataset Version](#get-the-size-of-downloading-all-the-files-of-a-dataset-version) + - [Get User Permissions on a File](#get-user-permissions-on-a-file) - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata 
Blocks](#metadata-blocks) - [Users](#Users) @@ -394,6 +395,36 @@ getDatasetFilesTotalDownloadSize /* ... */ ``` +#### Get User Permissions on a File + +This use case returns a [FileUserPermissions](../src/files/domain/models/FileUserPermissions.ts) object, which includes the permissions that the calling user has on a particular File. + +The returned *FileUserPermissions* object contains the following permissions, as booleans: + +- Can download the file (*canDownloadFile*) +- Can manage the file permissions (*canManageFilePermissions*) +- Can edit the file owner dataset (*canEditOwnerDataset*) + +##### Example call: + +```typescript +import { getFileUserPermissions } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId: number = 2; + +getFileUserPermissions.execute(fileId).then((permissions: FileUserPermissions) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileUserPermissions.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + #### List Files in a Dataset Returns an instance of [FilesSubset](../src/files/domain/models/FilesSubset.ts), which contains the files from the requested Dataset and page (if pagination parameters are set). diff --git a/src/files/domain/useCases/GetFileUserPermissions.ts b/src/files/domain/useCases/GetFileUserPermissions.ts index 454984ef..ed2ee266 100644 --- a/src/files/domain/useCases/GetFileUserPermissions.ts +++ b/src/files/domain/useCases/GetFileUserPermissions.ts @@ -9,6 +9,12 @@ export class GetFileUserPermissions implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns a FileUserPermissions object, which includes the permissions that the calling user has on a particular File. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). 
+ * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileUserPermissions(fileId); } From 53c7da7eabb8e972c29b4d5aed3bad7be43ba512 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:47:26 +0000 Subject: [PATCH 67/96] Added: GetMetadataBlockByName docs --- docs/useCases.md | 33 ++++++++++++++++--- .../domain/useCases/GetMetadataBlockByName.ts | 6 ++++ 2 files changed, 34 insertions(+), 5 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index dbdcd1f3..11634bcf 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -27,6 +27,7 @@ The different use cases currently available in the package are classified below, - [Get User Permissions on a File](#get-user-permissions-on-a-file) - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) + - [Metadata Blocks read use cases](#metadata-blocks-read-use-cases) - [Users](#Users) - [Info](#Info) @@ -399,11 +400,11 @@ getDatasetFilesTotalDownloadSize This use case returns a [FileUserPermissions](../src/files/domain/models/FileUserPermissions.ts) object, which includes the permissions that the calling user has on a particular File. -The returned *FileUserPermissions* object contains the following permissions, as booleans: +The returned _FileUserPermissions_ object contains the following permissions, as booleans: -- Can download the file (*canDownloadFile*) -- Can manage the file permissions (*canManageFilePermissions*) -- Can edit the file owner dataset (*canEditOwnerDataset*) +- Can download the file (_canDownloadFile_) +- Can manage the file permissions (_canManageFilePermissions_) +- Can edit the file owner dataset (_canEditOwnerDataset_) ##### Example call: @@ -486,7 +487,29 @@ getDatasetFiles ## Metadata Blocks -TODO +### Metadata Blocks read use cases + +#### Get Metadata Block By Name + +Returns a [MetadataBlock](../src/metadataBlocks/domain/models/MetadataBlock.ts) instance, given its name. 
+ +##### Example call: + +```typescript +import { getMetadataBlockByName } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const name = 'citation'; + +getMetadataBlockByName.execute(name).then((metadataBlock: MetadataBlock) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts) implementation_. ## Users diff --git a/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts b/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts index dc09c1c4..a34953e3 100644 --- a/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts +++ b/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts @@ -9,6 +9,12 @@ export class GetMetadataBlockByName implements UseCase { this.metadataBlocksRepository = metadataBlocksRepository; } + /** + * Returns a MetadataBlock instance, given its name. + * + * @param {string} [metadataBlockName] - The requested metadata block name. + * @returns {Promise} + */ async execute(metadataBlockName: string): Promise { return await this.metadataBlocksRepository.getMetadataBlockByName(metadataBlockName); } From 236e862847d19b50da25d512b2604a9d2a51cb8a Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:48:41 +0000 Subject: [PATCH 68/96] Added: table of contents tweak --- docs/useCases.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/useCases.md b/docs/useCases.md index 11634bcf..1c65667a 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -28,6 +28,7 @@ The different use cases currently available in the package are classified below, - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) - [Metadata Blocks read use cases](#metadata-blocks-read-use-cases) + - [Get Metadata Block By Name](#get-metadata-block-by-name) - [Users](#Users) - [Info](#Info) From 5d2bd024b0c7c0e98b925a6cfbb0b0e097ca5b32 Mon Sep 17 00:00:00 2001 From: GPortas Date: Fri, 2 Feb 2024 14:55:19 +0000 Subject: [PATCH 69/96] Added: 
GetCurrentAuthenticatedUser docs --- docs/useCases.md | 26 +++++++++++++++++-- .../useCases/GetCurrentAuthenticatedUser.ts | 5 ++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 1c65667a..60a95706 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -28,8 +28,10 @@ The different use cases currently available in the package are classified below, - [List Files in a Dataset](#list-files-in-a-dataset) - [Metadata Blocks](#metadata-blocks) - [Metadata Blocks read use cases](#metadata-blocks-read-use-cases) - - [Get Metadata Block By Name](#get-metadata-block-by-name) + - [Get Metadata Block By Name](#get-metadata-block-by-name) - [Users](#Users) + - [Users read use cases](#users-read-use-cases) + - [Get Current Authenticated User](#get-current-authenticated-user) - [Info](#Info) ## Datasets @@ -514,7 +516,27 @@ _See [use case](../src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts) ## Users -TODO +### Users read use cases + +#### Get Current Authenticated User + +Returns the current [AuthenticatedUser](../src/users/domain/models/AuthenticatedUser.ts) corresponding to the authentication mechanism provided through `ApiConfig`. + +##### Example call: + +```typescript +import { getCurrentAuthenticatedUser } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getCurrentAuthenticatedUser.execute().then((user: AuthenticatedUser) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/users/domain/useCases/GetCurrentAuthenticatedUser.ts) implementation_. 
## Info diff --git a/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts b/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts index 1b9acb85..6724b518 100644 --- a/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts +++ b/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts @@ -9,6 +9,11 @@ export class GetCurrentAuthenticatedUser implements UseCase { this.usersRepository = usersRepository; } + /** + * Returns the current AuthenticatedUser corresponding to the authentication mechanism provided through ApiConfig. + * + * @returns {Promise} + */ async execute(): Promise { return await this.usersRepository.getCurrentAuthenticatedUser(); } From 6131aceb67ede345a90f404e5aee25d816cfc81e Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 09:08:30 +0000 Subject: [PATCH 70/96] Added: docs for getDataverseVersion --- docs/useCases.md | 21 ++++++++++++++++++- .../domain/useCases/GetDataverseVersion.ts | 5 +++++ 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 60a95706..614b5970 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -33,6 +33,7 @@ The different use cases currently available in the package are classified below, - [Users read use cases](#users-read-use-cases) - [Get Current Authenticated User](#get-current-authenticated-user) - [Info](#Info) + - [Get Dataverse Backend Version](#get-dataverse-backend-version) ## Datasets @@ -540,4 +541,22 @@ _See [use case](../src/users/domain/useCases/GetCurrentAuthenticatedUser.ts) imp ## Info -TODO +#### Get Dataverse Backend Version + +Returns a [DataverseVersion](../src/info/domain/models/DataverseVersion.ts) object, which contains version information for the Dataverse backend installation. + +##### Example call: + +```typescript +import { getDataverseVersion } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getDataverseVersion.execute().then((version: DataverseVersion) => { + /* ... */ +}); + +/* ... 
*/ +``` + +_See [use case](../src/info/domain/useCases/GetDataverseVersion.ts) implementation_. diff --git a/src/info/domain/useCases/GetDataverseVersion.ts b/src/info/domain/useCases/GetDataverseVersion.ts index 4937b9d1..51455549 100644 --- a/src/info/domain/useCases/GetDataverseVersion.ts +++ b/src/info/domain/useCases/GetDataverseVersion.ts @@ -9,6 +9,11 @@ export class GetDataverseVersion implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a DataverseVersion object, which contains version information for the Dataverse backend installation. + * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getDataverseVersion(); } From 048d4b2e68b1c96bd04f777d2d610df6fe775eef Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 09:11:47 +0000 Subject: [PATCH 71/96] Added: docs for GetMaxEmbargoDurationInMonths --- docs/useCases.md | 21 +++++++++++++++++++ .../useCases/GetMaxEmbargoDurationInMonths.ts | 5 +++++ 2 files changed, 26 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index 614b5970..c1651ffe 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -34,6 +34,7 @@ The different use cases currently available in the package are classified below, - [Get Current Authenticated User](#get-current-authenticated-user) - [Info](#Info) - [Get Dataverse Backend Version](#get-dataverse-backend-version) + - [Get Maximum Embargo Duration In Months](#get-maximum-embargo-duration-in-months) ## Datasets @@ -560,3 +561,23 @@ getDataverseVersion.execute().then((version: DataverseVersion) => { ``` _See [use case](../src/info/domain/useCases/GetDataverseVersion.ts) implementation_. + +#### Get Maximum Embargo Duration In Months + +Returns a number indicating the configured maximum embargo duration in months. + +##### Example call: + +```typescript +import { getMaxEmbargoDurationInMonths } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +getMaxEmbargoDurationInMonths.execute().then((months: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts) implementation_. diff --git a/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts b/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts index b10c4d1e..3c3b62be 100644 --- a/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts +++ b/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts @@ -8,6 +8,11 @@ export class GetMaxEmbargoDurationInMonths implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a number indicating the configured maximum embargo duration in months. + * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getMaxEmbargoDurationInMonths(); } From 0e825a977d3265d08da17cd78c4a934d4db7c261 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 09:26:33 +0000 Subject: [PATCH 72/96] Added: GetZipDownloadLimit docs --- docs/useCases.md | 21 +++++++++++++++++++ .../domain/useCases/GetZipDownloadLimit.ts | 5 +++++ 2 files changed, 26 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index c1651ffe..7ed393bc 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -35,6 +35,7 @@ The different use cases currently available in the package are classified below, - [Info](#Info) - [Get Dataverse Backend Version](#get-dataverse-backend-version) - [Get Maximum Embargo Duration In Months](#get-maximum-embargo-duration-in-months) + - [Get ZIP Download Limit](#get-zip-download-limit) ## Datasets @@ -581,3 +582,23 @@ getMaxEmbargoDurationInMonths.execute().then((months: number) => { ``` _See [use case](../src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts) implementation_. + +#### Get ZIP Download Limit + +Returns a number indicating the configured ZIP download limit in bytes. 
+ +##### Example call: + +```typescript +import { getZipDownloadLimit } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getZipDownloadLimit.execute().then((downloadLimit: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/info/domain/useCases/GetZipDownloadLimit.ts) implementation_. diff --git a/src/info/domain/useCases/GetZipDownloadLimit.ts b/src/info/domain/useCases/GetZipDownloadLimit.ts index 84e8af4b..aa93bb6b 100644 --- a/src/info/domain/useCases/GetZipDownloadLimit.ts +++ b/src/info/domain/useCases/GetZipDownloadLimit.ts @@ -8,6 +8,11 @@ export class GetZipDownloadLimit implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a number indicating the configured ZIP download limit in bytes. + * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getZipDownloadLimit(); } From f0d9b386f2c1f88c17e0a4ea505c6c8d1b76dce5 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 09:26:50 +0000 Subject: [PATCH 73/96] Changed: CONTRIBUTING.md --- CONTRIBUTING.md | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9c3ee2b..244647c4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,31 +3,39 @@ First of all thank you very much for your interest in contributing to this project! ## Getting started -1. Make sure that you have installed the project dependencies -2. Build the project as explained in [README.md](README.md) -3. Fork the repository -4. Apply changes in your own branch -5. Create a pull request that we will review -6. Update README.md if necessary + +1. Fork the repository and clone your fork locally +2. Follow the [Local Development](./docs/localDevelopment.md) guide for setting up your local development environment +3. Create a branch and apply the desired changes on it +4. 
Create a pull request from your fork branch targeting the develop branch of the root repository ## Checklist before creating PR + - Project builds - Lint and format checks pass -- Unit tests pass -- Unit tests for new functionality/fix are added +- Unit and integration tests pass +- Unit and integration tests for new functionality/fix are added +- Documentation is updated (Any new use case added or modified should be documented in the [Use Cases](./docs/useCases.md) section) ## Code of Conduct + We abide by the upstream Code of Conduct at https://github.com/IQSS/dataverse/blob/develop/CODE_OF_CONDUCT.md and in addition ask the following. ### Git + - Branch names are self descriptive - Commit messages are short and concise - Branch is put up to date before creating PR ### Our responsibilities + - To keep the code clean - To provide constructive feedback to other developers - To maintain readable code at all times ## Getting help -Please feel free to reach out in https://chat.dataverse.org or https://groups.google.com/g/dataverse-dev \ No newline at end of file + +Please, do not hesitate to contact us through: + +- Zulip: https://dataverse.zulipchat.com/#narrow/stream/410361-ui-dev +- Google Groups: https://groups.google.com/g/dataverse-dev From 4deb4f99a8c483d57a0df486635e5ca5a6b19b3f Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 09:29:21 +0000 Subject: [PATCH 74/96] Added: doc tweaks --- CONTRIBUTING.md | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 244647c4..432eb098 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -38,4 +38,4 @@ We abide by the upstream Code of Conduct at https://github.com/IQSS/dataverse/bl Please, do not hesitate to contact us through: - Zulip: https://dataverse.zulipchat.com/#narrow/stream/410361-ui-dev -- Google Groups: https://groups.google.com/g/dataverse-dev +- Google Group: https://groups.google.com/g/dataverse-dev diff --git a/README.md 
b/README.md index 97440490..c88d7b79 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# dataverse-client-javascript +# js-dataverse [![npm](https://img.shields.io/npm/v/js-dataverse.svg)](https://www.npmjs.com/package/js-dataverse) From 39ee39f15de1fc36fcf7787832209e9f68a89d11 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 10:15:18 +0000 Subject: [PATCH 75/96] Added: docs for GetFile use case --- docs/useCases.md | 28 ++++++++++++++++++++++++++++ src/files/domain/useCases/GetFile.ts | 7 +++++++ 2 files changed, 35 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index 7ed393bc..7344934f 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -20,6 +20,7 @@ The different use cases currently available in the package are classified below, - [List All Datasets](#list-all-datasets) - [Files](#Files) - [Files read use cases](#files-read-use-cases) + - [Get a File](#get-a-file) - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) - [Get File Data Tables](#get-file-data-tables) - [Get File Download Count](#get-file-download-count) @@ -238,6 +239,33 @@ The `DatasetPreviewSubset`returned instance contains a property called `totalDat ### Files read use cases +#### Get a File + +Returns a [File](../src/files/domain/models/Dataset.ts) instance, given the search parameters to identify it. + +##### Example call: + +```typescript +import { getFile } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 2; +const datasetVersionId = '1.0'; + +getFile.execute(fileId, datasetVersionId).then((file: File) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFile.ts)_ definition. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. 
+ +The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. + #### Get File Counts in a Dataset Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: diff --git a/src/files/domain/useCases/GetFile.ts b/src/files/domain/useCases/GetFile.ts index 64f30052..6eb65991 100644 --- a/src/files/domain/useCases/GetFile.ts +++ b/src/files/domain/useCases/GetFile.ts @@ -5,6 +5,13 @@ import { DatasetNotNumberedVersion } from '../../../datasets'; export class GetFile { constructor(private readonly filesRepository: IFilesRepository) {} + /** + * Returns a File instance, given the search parameters to identify it. + * + * @param {number | string} [fileId] - The File identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. 
If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @returns {Promise} + */ async execute( fileId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From 9e39abf59ccbd07ddbaab00ad8842467a6db1b32 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 5 Feb 2024 10:16:34 +0000 Subject: [PATCH 76/96] Fixed: doc link --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 7344934f..ff12bae6 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -241,7 +241,7 @@ The `DatasetPreviewSubset`returned instance contains a property called `totalDat #### Get a File -Returns a [File](../src/files/domain/models/Dataset.ts) instance, given the search parameters to identify it. +Returns a [File](../src/files/domain/models/File.ts) instance, given the search parameters to identify it. ##### Example call: From 727d06f82bdaf6c3e31a31410779f57619da2bb9 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Tue, 6 Feb 2024 16:34:02 +0100 Subject: [PATCH 77/96] feat(GetFileCitation): add use case --- .../domain/repositories/IFilesRepository.ts | 1 + src/files/domain/useCases/GetFileCitation.ts | 19 +++++++++ src/files/index.ts | 3 ++ .../infra/repositories/FilesRepository.ts | 10 +++++ test/unit/files/GetFileCitation.test.ts | 39 +++++++++++++++++++ 5 files changed, 72 insertions(+) create mode 100644 src/files/domain/useCases/GetFileCitation.ts create mode 100644 test/unit/files/GetFileCitation.test.ts diff --git a/src/files/domain/repositories/IFilesRepository.ts b/src/files/domain/repositories/IFilesRepository.ts index fc9b6dda..efd38b2d 100644 --- a/src/files/domain/repositories/IFilesRepository.ts +++ b/src/files/domain/repositories/IFilesRepository.ts @@ -39,4 +39,5 @@ export interface IFilesRepository { getFileDataTables(fileId: number | string): Promise; getFile(fileId: number | string, datasetVersionId: string): Promise; + 
getFileCitation(fileId: number | string, datasetVersionId: string, includeDeaccessioned: boolean): Promise; } diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts new file mode 100644 index 00000000..2daccc1d --- /dev/null +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -0,0 +1,19 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase'; +import { IFilesRepository } from '../repositories/IFilesRepository'; +import { DatasetNotNumberedVersion } from '../../../datasets'; + +export class GetFileCitation implements UseCase { + private filesRepository: IFilesRepository; + + constructor(filesRepository: IFilesRepository) { + this.filesRepository = filesRepository; + } + + async execute( + fileId: number, + datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, + includeDeaccessioned = false, + ): Promise { + return await this.filesRepository.getFileCitation(fileId, datasetVersionId, includeDeaccessioned); + } +} diff --git a/src/files/index.ts b/src/files/index.ts index 45b8f270..c50c3785 100644 --- a/src/files/index.ts +++ b/src/files/index.ts @@ -6,6 +6,7 @@ import { GetFileUserPermissions } from './domain/useCases/GetFileUserPermissions import { GetFileDataTables } from './domain/useCases/GetFileDataTables'; import { GetDatasetFilesTotalDownloadSize } from './domain/useCases/GetDatasetFilesTotalDownloadSize'; import { GetFile } from './domain/useCases/GetFile'; +import { GetFileCitation } from './domain/useCases/GetFileCitation'; const filesRepository = new FilesRepository(); @@ -16,6 +17,7 @@ const getFileUserPermissions = new GetFileUserPermissions(filesRepository); const getFileDataTables = new GetFileDataTables(filesRepository); const getDatasetFilesTotalDownloadSize = new GetDatasetFilesTotalDownloadSize(filesRepository); const getFile = new GetFile(filesRepository); +const getFileCitation = new GetFileCitation(filesRepository); export { getDatasetFiles, @@ 
-25,6 +27,7 @@ export { getDatasetFileCounts, getDatasetFilesTotalDownloadSize, getFile, + getFileCitation, }; export { File, FileEmbargo, FileChecksum } from './domain/models/File'; diff --git a/src/files/infra/repositories/FilesRepository.ts b/src/files/infra/repositories/FilesRepository.ts index dff4031b..aed27b73 100644 --- a/src/files/infra/repositories/FilesRepository.ts +++ b/src/files/infra/repositories/FilesRepository.ts @@ -153,6 +153,16 @@ export class FilesRepository extends ApiRepository implements IFilesRepository { }); } + public async getFileCitation( + fileId: number | string, + datasetVersionId: string, + includeDeaccessioned: boolean, + ): Promise { + return Promise.resolve( + `Requesting a file citation is not yet supported. ${fileId} ${datasetVersionId} ${includeDeaccessioned}`, + ); + } + private getFileEndpoint(fileId: number | string, datasetVersionId: string): string { if (datasetVersionId === DatasetNotNumberedVersion.DRAFT) { return this.buildApiEndpoint(this.filesResourceName, 'draft', fileId); diff --git a/test/unit/files/GetFileCitation.test.ts b/test/unit/files/GetFileCitation.test.ts new file mode 100644 index 00000000..ac40b547 --- /dev/null +++ b/test/unit/files/GetFileCitation.test.ts @@ -0,0 +1,39 @@ +import {assert, createSandbox, SinonSandbox} from "sinon"; +import {DatasetNotNumberedVersion, ReadError} from "../../../src"; +import {IFilesRepository} from "../../../src/files/domain/repositories/IFilesRepository"; +import {GetFileCitation} from "../../../src/files/domain/useCases/GetFileCitation"; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + const testId = 1; + + afterEach(() => { + sandbox.restore(); + }); + + test('should return successful result with file citation on repository success', async () => { + const testCitation = 'test citation'; + const filesRepositoryStub = {}; + const getFileCitation = sandbox.stub().returns(testCitation); + filesRepositoryStub.getFileCitation = 
getFileCitation; + + const sut = new GetFileCitation(filesRepositoryStub); + + const actual = await sut.execute(testId); + + assert.match(actual, testCitation); + assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST, false); + }); + + test('should return error result on repository error', async () => { + const filesRepositoryStub = {}; + const testReadError = new ReadError(); + filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); + const sut = new GetFileCitation(filesRepositoryStub); + + let actualError: ReadError = undefined; + await sut.execute(testId).catch((e) => (actualError = e)); + + assert.match(actualError, testReadError); + }); +}) \ No newline at end of file From 32291ed9024c32fb59137e74f104cf662709da44 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Wed, 7 Feb 2024 12:28:40 +0100 Subject: [PATCH 78/96] feat(GetFileCitation): add integration with Dataverse API --- .../infra/repositories/FilesRepository.ts | 12 +++-- .../datasets/DatasetsRepository.test.ts | 7 +-- test/integration/environment/.env | 4 +- .../integration/files/FilesRepository.test.ts | 48 +++++++++++++++++ test/unit/files/FilesRepository.test.ts | 53 +++++++++++++++++++ 5 files changed, 113 insertions(+), 11 deletions(-) diff --git a/src/files/infra/repositories/FilesRepository.ts b/src/files/infra/repositories/FilesRepository.ts index aed27b73..25a54296 100644 --- a/src/files/infra/repositories/FilesRepository.ts +++ b/src/files/infra/repositories/FilesRepository.ts @@ -158,9 +158,15 @@ export class FilesRepository extends ApiRepository implements IFilesRepository { datasetVersionId: string, includeDeaccessioned: boolean, ): Promise { - return Promise.resolve( - `Requesting a file citation is not yet supported. 
${fileId} ${datasetVersionId} ${includeDeaccessioned}`, - ); + return this.doGet( + this.buildApiEndpoint(this.filesResourceName, `versions/${datasetVersionId}/citation`, fileId), + true, + { includeDeaccessioned: includeDeaccessioned }, + ) + .then((response) => response.data.data.message) + .catch((error) => { + throw error; + }); } private getFileEndpoint(fileId: number | string, datasetVersionId: string): string { diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 3ce4fe82..ff14e769 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -81,11 +81,6 @@ describe('DatasetsRepository', () => { expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID); }); - test('should return dataset when it exists filtering by id and version id', async () => { - const actual = await sut.getDataset(TestConstants.TEST_CREATED_DATASET_1_ID, latestVersionId, false); - expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID); - }); - test('should return dataset when it is deaccessioned and includeDeaccessioned param is set', async () => { await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) .then() @@ -238,7 +233,7 @@ describe('DatasetsRepository', () => { assert.match(actual.length, 1); assert.match(actual[0].lockType, DatasetLockType.FINALIZE_PUBLICATION); assert.match(actual[0].userId, 'dataverseAdmin'); - assert.match(actual[0].message, 'Publishing the dataset; Validating Datafiles Asynchronously'); + assert.match(actual[0].message, 'Publishing the dataset; Registering PIDs for Datafiles; Validating Datafiles Asynchronously'); }); test('should return error when dataset does not exist', async () => { diff --git a/test/integration/environment/.env b/test/integration/environment/.env index 80e9a14e..fd0e469f 100644 --- a/test/integration/environment/.env +++ b/test/integration/environment/.env @@ -1,6 +1,6 @@ 
POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -DATAVERSE_IMAGE_REGISTRY=docker.io -DATAVERSE_IMAGE_TAG=unstable +DATAVERSE_IMAGE_REGISTRY=ghcr.io +DATAVERSE_IMAGE_TAG=10240-file-citation DATAVERSE_BOOTSTRAP_TIMEOUT=5m diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index e0d9897c..7241f163 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -11,6 +11,7 @@ import { DatasetNotNumberedVersion } from '../../../src/datasets'; import { FileCounts } from '../../../src/files/domain/models/FileCounts'; import { FileDownloadSizeMode } from '../../../src'; import { fail } from 'assert'; +import {deaccessionDatasetViaApi, publishDatasetViaApi, waitForNoLocks} from "../../testHelpers/datasets/datasetHelper"; describe('FilesRepository', () => { const sut: FilesRepository = new FilesRepository(); @@ -519,4 +520,51 @@ describe('FilesRepository', () => { }); }); }); + describe('getFileCitation', () => { + test('should return citation when file exists', async () => { + const actualFileCitation = await sut.getFileCitation( + testFileId, + DatasetNotNumberedVersion.LATEST, + false, + ); + expect(typeof actualFileCitation).to.be.a('string'); + }); + + test('should return citation when dataset is deaccessioned', async () => { + await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); + + await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_1_ID, 10) + .then() + .catch(() => { + assert.fail('Error while waiting for no locks'); + }); + + await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, '1.0') + .then() + .catch(() => { + assert.fail('Error while deaccessioning test Dataset'); + }); + + const actualFileCitation = await sut.getFileCitation( + testFileId, + DatasetNotNumberedVersion.LATEST, + true, + ); + expect(typeof 
actualFileCitation).to.be.a('string'); + }); + + test('should return error when file does not exist', async () => { + let error: ReadError = undefined; + await sut.getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST, false).catch((e) => (error = e)); + + assert.match( + error.message, + `There was an error when reading the resource. Reason was: [404] File with ID ${nonExistentFiledId} not found.`, + ); + }); + }); }); diff --git a/test/unit/files/FilesRepository.test.ts b/test/unit/files/FilesRepository.test.ts index 4348d1d6..38cd4d92 100644 --- a/test/unit/files/FilesRepository.test.ts +++ b/test/unit/files/FilesRepository.test.ts @@ -874,4 +874,57 @@ describe('FilesRepository', () => { }); }); }); + + describe('getFileCitation', () => { + const testIncludeDeaccessioned = true; + const testCitation = 'test citation'; + const testCitationSuccessfulResponse = { + data: { + status: 'OK', + data: { + message: testCitation, + }, + }, + }; + test('should return citation when response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testCitationSuccessfulResponse); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`; + + // API Key auth + let actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + ); + assert.match(actual, testCitation); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + 
TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, + ); + assert.match(actual, testCitation); + }); + + test('should return error on repository read error', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut.getFileCitation(1, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + ); + expect(error).to.be.instanceOf(Error); + }); + }); }); From c0259c70573e72b2886f6d6c4a223806d1b581fc Mon Sep 17 00:00:00 2001 From: MellyGray Date: Thu, 15 Feb 2024 13:07:39 +0100 Subject: [PATCH 79/96] feat(GetFileCitation): set latest-published as the default version --- .../models/DatasetNotNumberedVersion.ts | 1 + src/files/domain/useCases/GetFileCitation.ts | 2 +- .../integration/files/FilesRepository.test.ts | 102 ++++---- test/unit/files/FilesRepository.test.ts | 226 +++++++++--------- test/unit/files/GetFileCitation.test.ts | 58 ++--- 5 files changed, 203 insertions(+), 186 deletions(-) diff --git a/src/datasets/domain/models/DatasetNotNumberedVersion.ts b/src/datasets/domain/models/DatasetNotNumberedVersion.ts index a646b5be..00f46a25 100644 --- a/src/datasets/domain/models/DatasetNotNumberedVersion.ts +++ b/src/datasets/domain/models/DatasetNotNumberedVersion.ts @@ -1,4 +1,5 @@ export enum DatasetNotNumberedVersion { DRAFT = ':draft', LATEST = ':latest', + LATEST_PUBLISHED = ':latest-published', } diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts index 2daccc1d..385adfa7 100644 --- a/src/files/domain/useCases/GetFileCitation.ts +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -11,7 +11,7 @@ 
export class GetFileCitation implements UseCase { async execute( fileId: number, - datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, + datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST_PUBLISHED, includeDeaccessioned = false, ): Promise { return await this.filesRepository.getFileCitation(fileId, datasetVersionId, includeDeaccessioned); diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index 7241f163..7dfa1bb7 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -3,7 +3,7 @@ import { ApiConfig, DataverseApiAuthMechanism } from '../../../src/core/infra/re import { assert } from 'sinon'; import { expect } from 'chai'; import { TestConstants } from '../../testHelpers/TestConstants'; -import {registerFileViaApi, uploadFileViaApi} from '../../testHelpers/files/filesHelper'; +import { registerFileViaApi, uploadFileViaApi } from '../../testHelpers/files/filesHelper'; import { DatasetsRepository } from '../../../src/datasets/infra/repositories/DatasetsRepository'; import { ReadError } from '../../../src/core/domain/repositories/ReadError'; import { FileSearchCriteria, FileAccessStatus, FileOrderCriteria } from '../../../src/files/domain/models/FileCriteria'; @@ -11,7 +11,11 @@ import { DatasetNotNumberedVersion } from '../../../src/datasets'; import { FileCounts } from '../../../src/files/domain/models/FileCounts'; import { FileDownloadSizeMode } from '../../../src'; import { fail } from 'assert'; -import {deaccessionDatasetViaApi, publishDatasetViaApi, waitForNoLocks} from "../../testHelpers/datasets/datasetHelper"; +import { + deaccessionDatasetViaApi, + publishDatasetViaApi, + waitForNoLocks, +} from '../../testHelpers/datasets/datasetHelper'; describe('FilesRepository', () => { const sut: FilesRepository = new FilesRepository(); @@ -33,7 +37,9 @@ describe('FilesRepository', 
() => { beforeAll(async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); // Uploading test file 1 with some categories - const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { categories: [testCategoryName] }) + const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { + categories: [testCategoryName], + }) .then() .catch((e) => { console.log(e); @@ -63,11 +69,11 @@ describe('FilesRepository', () => { // Registering test file 1 await registerFileViaApi(uploadFileResponse.data.data.files[0].dataFile.id); const filesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false, - FileOrderCriteria.NAME_AZ, - ) + TestConstants.TEST_CREATED_DATASET_1_ID, + latestDatasetVersionId, + false, + FileOrderCriteria.NAME_AZ, + ); testFileId = filesSubset.files[0].id; testFilePersistentId = filesSubset.files[0].persistentId; }); @@ -447,11 +453,11 @@ describe('FilesRepository', () => { describe('getFile', () => { describe('by numeric id', () => { - test('should return file when providing a valid id', async () => { - const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); + test('should return file when providing a valid id', async () => { + const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); - assert.match(actual.name, testTextFile1Name); - }); + assert.match(actual.name, testTextFile1Name); + }); test('should return file draft when providing a valid id and version is draft', async () => { const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.DRAFT); @@ -466,21 +472,21 @@ describe('FilesRepository', () => { await sut.getFile(testFileId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. 
Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, ); }); - test('should return error when file does not exist', async () => { - let error: ReadError = undefined; + test('should return error when file does not exist', async () => { + let error: ReadError = undefined; - await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, - ); - }); + assert.match( + error.message, + `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, + ); + }); }); describe('by persistent id', () => { test('should return file when providing a valid persistent id', async () => { @@ -502,8 +508,8 @@ describe('FilesRepository', () => { await sut.getFile(testFilePersistentId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, ); }); @@ -514,56 +520,54 @@ describe('FilesRepository', () => { await sut.getFile(nonExistentFiledPersistentId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, + error.message, + `There was an error when reading the resource. 
Reason was: [400] Error attempting get the requested data file.`, ); }); }); }); describe('getFileCitation', () => { test('should return citation when file exists', async () => { - const actualFileCitation = await sut.getFileCitation( - testFileId, - DatasetNotNumberedVersion.LATEST, - false, - ); + const actualFileCitation = await sut.getFileCitation(testFileId, DatasetNotNumberedVersion.DRAFT, false); expect(typeof actualFileCitation).to.be.a('string'); }); test('should return citation when dataset is deaccessioned', async () => { await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) - .then() - .catch(() => { - assert.fail('Error while publishing test Dataset'); - }); + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_1_ID, 10) - .then() - .catch(() => { - assert.fail('Error while waiting for no locks'); - }); + .then() + .catch(() => { + assert.fail('Error while waiting for no locks'); + }); await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, '1.0') - .then() - .catch(() => { - assert.fail('Error while deaccessioning test Dataset'); - }); + .then() + .catch(() => { + assert.fail('Error while deaccessioning test Dataset'); + }); const actualFileCitation = await sut.getFileCitation( - testFileId, - DatasetNotNumberedVersion.LATEST, - true, + testFileId, + DatasetNotNumberedVersion.LATEST_PUBLISHED, + true, ); expect(typeof actualFileCitation).to.be.a('string'); }); test('should return error when file does not exist', async () => { let error: ReadError = undefined; - await sut.getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST, false).catch((e) => (error = e)); + await sut + .getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST_PUBLISHED, false) + .catch((e) => (error = e)); assert.match( - error.message, - `There was an error when reading the resource. 
Reason was: [404] File with ID ${nonExistentFiledId} not found.`, + error.message, + `There was an error when reading the resource. Reason was: [404] File with ID ${nonExistentFiledId} not found.`, ); }); }); diff --git a/test/unit/files/FilesRepository.test.ts b/test/unit/files/FilesRepository.test.ts index 38cd4d92..469f25d5 100644 --- a/test/unit/files/FilesRepository.test.ts +++ b/test/unit/files/FilesRepository.test.ts @@ -9,7 +9,7 @@ import { createFileModel, createManyFilesPayload, createFilesSubsetModel, - createFilePayload + createFilePayload, } from '../../testHelpers/files/filesHelper'; import { createFileDataTablePayload, createFileDataTableModel } from '../../testHelpers/files/fileDataTablesHelper'; import { createFileUserPermissionsModel } from '../../testHelpers/files/fileUserPermissionsHelper'; @@ -776,102 +776,104 @@ describe('FilesRepository', () => { }); }); }); - describe('getFile' , () => { + describe('getFile', () => { describe('by numeric id', () => { - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/`; - const testGetFileResponse = { - data: { - status: 'OK', - data: createFilePayload(), - }, - }; - test('should return file when providing id and response is successful', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); - - // API Key auth - let actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - assert.match(actual, createFileModel()); - - // Session cookie auth - ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - - actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, - ); - assert.match(actual, 
createFileModel()); - }); - - test('should return error result on error response', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); - - let error: ReadError = undefined; - await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - expect(error).to.be.instanceOf(Error); - }); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/`; + const testGetFileResponse = { + data: { + status: 'OK', + data: createFilePayload(), + }, + }; + test('should return file when providing id and response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); + + // API Key auth + let actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + assert.match(actual, createFileModel()); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + assert.match(actual, createFileModel()); + }); + + test('should return error result on error response', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + 
expect(error).to.be.instanceOf(Error); + }); }); describe('by persistent id', () => { - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/:persistentId/?persistentId=${TestConstants.TEST_DUMMY_PERSISTENT_ID}`; - const testGetFileResponse = { - data: { - status: 'OK', - data: createFilePayload(), - }, - }; - test('should return file when providing persistent id and response is successful', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); - - // API Key auth - let actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - assert.match(actual, createFileModel()); - - // Session cookie auth - ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - - actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, - ); - assert.match(actual, createFileModel()); - }); - - test('should return error result on error response', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); - - let error: ReadError = undefined; - await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - expect(error).to.be.instanceOf(Error); - }); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/:persistentId/?persistentId=${TestConstants.TEST_DUMMY_PERSISTENT_ID}`; + const testGetFileResponse = { + data: { + status: 'OK', + data: createFilePayload(), + }, + }; + test('should 
return file when providing persistent id and response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); + + // API Key auth + let actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + assert.match(actual, createFileModel()); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + assert.match(actual, createFileModel()); + }); + + test('should return error result on error response', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut + .getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST) + .catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + expect(error).to.be.instanceOf(Error); + }); }); }); @@ -888,27 +890,35 @@ describe('FilesRepository', () => { }; test('should return citation when response is successful', async () => { const axiosGetStub = sandbox.stub(axios, 'get').resolves(testCitationSuccessfulResponse); - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`; + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST_PUBLISHED}/citation`; // API Key auth - let actual = await 
sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + let actual = await sut.getFileCitation( + testFile.id, + DatasetNotNumberedVersion.LATEST_PUBLISHED, + testIncludeDeaccessioned, + ); assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, ); assert.match(actual, testCitation); // Session cookie auth ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + actual = await sut.getFileCitation( + testFile.id, + DatasetNotNumberedVersion.LATEST_PUBLISHED, + testIncludeDeaccessioned, + ); assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, ); assert.match(actual, testCitation); }); @@ -917,12 +927,14 @@ describe('FilesRepository', () => { const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); let error: ReadError = undefined; - await sut.getFileCitation(1, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned).catch((e) => (error = e)); + await sut + .getFileCitation(1, DatasetNotNumberedVersion.LATEST_PUBLISHED, testIncludeDeaccessioned) + .catch((e) => (error = e)); assert.calledWithExactly( - axiosGetStub, - `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + axiosGetStub, + 
`${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST_PUBLISHED}/citation`, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, ); expect(error).to.be.instanceOf(Error); }); diff --git a/test/unit/files/GetFileCitation.test.ts b/test/unit/files/GetFileCitation.test.ts index ac40b547..103fdae1 100644 --- a/test/unit/files/GetFileCitation.test.ts +++ b/test/unit/files/GetFileCitation.test.ts @@ -1,39 +1,39 @@ -import {assert, createSandbox, SinonSandbox} from "sinon"; -import {DatasetNotNumberedVersion, ReadError} from "../../../src"; -import {IFilesRepository} from "../../../src/files/domain/repositories/IFilesRepository"; -import {GetFileCitation} from "../../../src/files/domain/useCases/GetFileCitation"; +import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { DatasetNotNumberedVersion, ReadError } from '../../../src'; +import { IFilesRepository } from '../../../src/files/domain/repositories/IFilesRepository'; +import { GetFileCitation } from '../../../src/files/domain/useCases/GetFileCitation'; describe('execute', () => { - const sandbox: SinonSandbox = createSandbox(); - const testId = 1; + const sandbox: SinonSandbox = createSandbox(); + const testId = 1; - afterEach(() => { - sandbox.restore(); - }); + afterEach(() => { + sandbox.restore(); + }); - test('should return successful result with file citation on repository success', async () => { - const testCitation = 'test citation'; - const filesRepositoryStub = {}; - const getFileCitation = sandbox.stub().returns(testCitation); - filesRepositoryStub.getFileCitation = getFileCitation; + test('should return successful result with file citation on repository success', async () => { + const testCitation = 'test citation'; + const filesRepositoryStub = {}; + const getFileCitation = sandbox.stub().returns(testCitation); + filesRepositoryStub.getFileCitation = getFileCitation; - const sut = new 
GetFileCitation(filesRepositoryStub); + const sut = new GetFileCitation(filesRepositoryStub); - const actual = await sut.execute(testId); + const actual = await sut.execute(testId); - assert.match(actual, testCitation); - assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST, false); - }); + assert.match(actual, testCitation); + assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST_PUBLISHED, false); + }); - test('should return error result on repository error', async () => { - const filesRepositoryStub = {}; - const testReadError = new ReadError(); - filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); - const sut = new GetFileCitation(filesRepositoryStub); + test('should return error result on repository error', async () => { + const filesRepositoryStub = {}; + const testReadError = new ReadError(); + filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); + const sut = new GetFileCitation(filesRepositoryStub); - let actualError: ReadError = undefined; - await sut.execute(testId).catch((e) => (actualError = e)); + let actualError: ReadError = undefined; + await sut.execute(testId).catch((e) => (actualError = e)); - assert.match(actualError, testReadError); - }); -}) \ No newline at end of file + assert.match(actualError, testReadError); + }); +}); From 463d37236bc31424ff3aca6d429488d0d7928b29 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Fri, 16 Feb 2024 18:17:55 +0100 Subject: [PATCH 80/96] Revert "feat(GetFileCitation): set latest-published as the default version" This reverts commit c0259c70573e72b2886f6d6c4a223806d1b581fc. 
--- .../models/DatasetNotNumberedVersion.ts | 1 - src/files/domain/useCases/GetFileCitation.ts | 2 +- .../integration/files/FilesRepository.test.ts | 102 ++++---- test/unit/files/FilesRepository.test.ts | 226 +++++++++--------- test/unit/files/GetFileCitation.test.ts | 58 ++--- 5 files changed, 186 insertions(+), 203 deletions(-) diff --git a/src/datasets/domain/models/DatasetNotNumberedVersion.ts b/src/datasets/domain/models/DatasetNotNumberedVersion.ts index 00f46a25..a646b5be 100644 --- a/src/datasets/domain/models/DatasetNotNumberedVersion.ts +++ b/src/datasets/domain/models/DatasetNotNumberedVersion.ts @@ -1,5 +1,4 @@ export enum DatasetNotNumberedVersion { DRAFT = ':draft', LATEST = ':latest', - LATEST_PUBLISHED = ':latest-published', } diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts index 385adfa7..2daccc1d 100644 --- a/src/files/domain/useCases/GetFileCitation.ts +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -11,7 +11,7 @@ export class GetFileCitation implements UseCase { async execute( fileId: number, - datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST_PUBLISHED, + datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, includeDeaccessioned = false, ): Promise { return await this.filesRepository.getFileCitation(fileId, datasetVersionId, includeDeaccessioned); diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index 7dfa1bb7..7241f163 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -3,7 +3,7 @@ import { ApiConfig, DataverseApiAuthMechanism } from '../../../src/core/infra/re import { assert } from 'sinon'; import { expect } from 'chai'; import { TestConstants } from '../../testHelpers/TestConstants'; -import { registerFileViaApi, uploadFileViaApi } from '../../testHelpers/files/filesHelper'; +import 
{registerFileViaApi, uploadFileViaApi} from '../../testHelpers/files/filesHelper'; import { DatasetsRepository } from '../../../src/datasets/infra/repositories/DatasetsRepository'; import { ReadError } from '../../../src/core/domain/repositories/ReadError'; import { FileSearchCriteria, FileAccessStatus, FileOrderCriteria } from '../../../src/files/domain/models/FileCriteria'; @@ -11,11 +11,7 @@ import { DatasetNotNumberedVersion } from '../../../src/datasets'; import { FileCounts } from '../../../src/files/domain/models/FileCounts'; import { FileDownloadSizeMode } from '../../../src'; import { fail } from 'assert'; -import { - deaccessionDatasetViaApi, - publishDatasetViaApi, - waitForNoLocks, -} from '../../testHelpers/datasets/datasetHelper'; +import {deaccessionDatasetViaApi, publishDatasetViaApi, waitForNoLocks} from "../../testHelpers/datasets/datasetHelper"; describe('FilesRepository', () => { const sut: FilesRepository = new FilesRepository(); @@ -37,9 +33,7 @@ describe('FilesRepository', () => { beforeAll(async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); // Uploading test file 1 with some categories - const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { - categories: [testCategoryName], - }) + const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { categories: [testCategoryName] }) .then() .catch((e) => { console.log(e); @@ -69,11 +63,11 @@ describe('FilesRepository', () => { // Registering test file 1 await registerFileViaApi(uploadFileResponse.data.data.files[0].dataFile.id); const filesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false, - FileOrderCriteria.NAME_AZ, - ); + TestConstants.TEST_CREATED_DATASET_1_ID, + latestDatasetVersionId, + false, + FileOrderCriteria.NAME_AZ, + ) testFileId = 
filesSubset.files[0].id; testFilePersistentId = filesSubset.files[0].persistentId; }); @@ -453,11 +447,11 @@ describe('FilesRepository', () => { describe('getFile', () => { describe('by numeric id', () => { - test('should return file when providing a valid id', async () => { - const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); + test('should return file when providing a valid id', async () => { + const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); - assert.match(actual.name, testTextFile1Name); - }); + assert.match(actual.name, testTextFile1Name); + }); test('should return file draft when providing a valid id and version is draft', async () => { const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.DRAFT); @@ -472,21 +466,21 @@ describe('FilesRepository', () => { await sut.getFile(testFileId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, ); }); - test('should return error when file does not exist', async () => { - let error: ReadError = undefined; + test('should return error when file does not exist', async () => { + let error: ReadError = undefined; - await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, - ); - }); + assert.match( + error.message, + `There was an error when reading the resource. 
Reason was: [400] Error attempting get the requested data file.`, + ); + }); }); describe('by persistent id', () => { test('should return file when providing a valid persistent id', async () => { @@ -508,8 +502,8 @@ describe('FilesRepository', () => { await sut.getFile(testFilePersistentId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, ); }); @@ -520,54 +514,56 @@ describe('FilesRepository', () => { await sut.getFile(nonExistentFiledPersistentId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, + error.message, + `There was an error when reading the resource. 
Reason was: [400] Error attempting get the requested data file.`, ); }); }); }); describe('getFileCitation', () => { test('should return citation when file exists', async () => { - const actualFileCitation = await sut.getFileCitation(testFileId, DatasetNotNumberedVersion.DRAFT, false); + const actualFileCitation = await sut.getFileCitation( + testFileId, + DatasetNotNumberedVersion.LATEST, + false, + ); expect(typeof actualFileCitation).to.be.a('string'); }); test('should return citation when dataset is deaccessioned', async () => { await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) - .then() - .catch(() => { - assert.fail('Error while publishing test Dataset'); - }); + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_1_ID, 10) - .then() - .catch(() => { - assert.fail('Error while waiting for no locks'); - }); + .then() + .catch(() => { + assert.fail('Error while waiting for no locks'); + }); await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, '1.0') - .then() - .catch(() => { - assert.fail('Error while deaccessioning test Dataset'); - }); + .then() + .catch(() => { + assert.fail('Error while deaccessioning test Dataset'); + }); const actualFileCitation = await sut.getFileCitation( - testFileId, - DatasetNotNumberedVersion.LATEST_PUBLISHED, - true, + testFileId, + DatasetNotNumberedVersion.LATEST, + true, ); expect(typeof actualFileCitation).to.be.a('string'); }); test('should return error when file does not exist', async () => { let error: ReadError = undefined; - await sut - .getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST_PUBLISHED, false) - .catch((e) => (error = e)); + await sut.getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST, false).catch((e) => (error = e)); assert.match( - error.message, - `There was an error when reading the resource. 
Reason was: [404] File with ID ${nonExistentFiledId} not found.`, + error.message, + `There was an error when reading the resource. Reason was: [404] File with ID ${nonExistentFiledId} not found.`, ); }); }); diff --git a/test/unit/files/FilesRepository.test.ts b/test/unit/files/FilesRepository.test.ts index 469f25d5..38cd4d92 100644 --- a/test/unit/files/FilesRepository.test.ts +++ b/test/unit/files/FilesRepository.test.ts @@ -9,7 +9,7 @@ import { createFileModel, createManyFilesPayload, createFilesSubsetModel, - createFilePayload, + createFilePayload } from '../../testHelpers/files/filesHelper'; import { createFileDataTablePayload, createFileDataTableModel } from '../../testHelpers/files/fileDataTablesHelper'; import { createFileUserPermissionsModel } from '../../testHelpers/files/fileUserPermissionsHelper'; @@ -776,104 +776,102 @@ describe('FilesRepository', () => { }); }); }); - describe('getFile', () => { + describe('getFile' , () => { describe('by numeric id', () => { - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/`; - const testGetFileResponse = { - data: { - status: 'OK', - data: createFilePayload(), - }, - }; - test('should return file when providing id and response is successful', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); - - // API Key auth - let actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - assert.match(actual, createFileModel()); - - // Session cookie auth - ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - - actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, - ); - assert.match(actual, 
createFileModel()); - }); - - test('should return error result on error response', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); - - let error: ReadError = undefined; - await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - expect(error).to.be.instanceOf(Error); - }); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/`; + const testGetFileResponse = { + data: { + status: 'OK', + data: createFilePayload(), + }, + }; + test('should return file when providing id and response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); + + // API Key auth + let actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + assert.match(actual, createFileModel()); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + assert.match(actual, createFileModel()); + }); + + test('should return error result on error response', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut.getFile(testFile.id, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + 
expect(error).to.be.instanceOf(Error); + }); }); describe('by persistent id', () => { - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/:persistentId/?persistentId=${TestConstants.TEST_DUMMY_PERSISTENT_ID}`; - const testGetFileResponse = { - data: { - status: 'OK', - data: createFilePayload(), - }, - }; - test('should return file when providing persistent id and response is successful', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); - - // API Key auth - let actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - assert.match(actual, createFileModel()); - - // Session cookie auth - ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - - actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, - ); - assert.match(actual, createFileModel()); - }); - - test('should return error result on error response', async () => { - const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); - - let error: ReadError = undefined; - await sut - .getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST) - .catch((e) => (error = e)); - - assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, - ); - expect(error).to.be.instanceOf(Error); - }); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/:persistentId/?persistentId=${TestConstants.TEST_DUMMY_PERSISTENT_ID}`; + const testGetFileResponse = { + data: { + status: 'OK', + data: createFilePayload(), + }, + }; + 
test('should return file when providing persistent id and response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testGetFileResponse); + + // API Key auth + let actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + assert.match(actual, createFileModel()); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + assert.match(actual, createFileModel()); + }); + + test('should return error result on error response', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut.getFile(TestConstants.TEST_DUMMY_PERSISTENT_ID, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + expect(error).to.be.instanceOf(Error); + }); }); }); @@ -890,35 +888,27 @@ describe('FilesRepository', () => { }; test('should return citation when response is successful', async () => { const axiosGetStub = sandbox.stub(axios, 'get').resolves(testCitationSuccessfulResponse); - const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST_PUBLISHED}/citation`; + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`; // API Key auth - let actual = await sut.getFileCitation( 
- testFile.id, - DatasetNotNumberedVersion.LATEST_PUBLISHED, - testIncludeDeaccessioned, - ); + let actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, ); assert.match(actual, testCitation); // Session cookie auth ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); - actual = await sut.getFileCitation( - testFile.id, - DatasetNotNumberedVersion.LATEST_PUBLISHED, - testIncludeDeaccessioned, - ); + actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); assert.calledWithExactly( - axiosGetStub, - expectedApiEndpoint, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, ); assert.match(actual, testCitation); }); @@ -927,14 +917,12 @@ describe('FilesRepository', () => { const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); let error: ReadError = undefined; - await sut - .getFileCitation(1, DatasetNotNumberedVersion.LATEST_PUBLISHED, testIncludeDeaccessioned) - .catch((e) => (error = e)); + await sut.getFileCitation(1, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned).catch((e) => (error = e)); assert.calledWithExactly( - axiosGetStub, - `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST_PUBLISHED}/citation`, - TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + axiosGetStub, + 
`${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, ); expect(error).to.be.instanceOf(Error); }); diff --git a/test/unit/files/GetFileCitation.test.ts b/test/unit/files/GetFileCitation.test.ts index 103fdae1..ac40b547 100644 --- a/test/unit/files/GetFileCitation.test.ts +++ b/test/unit/files/GetFileCitation.test.ts @@ -1,39 +1,39 @@ -import { assert, createSandbox, SinonSandbox } from 'sinon'; -import { DatasetNotNumberedVersion, ReadError } from '../../../src'; -import { IFilesRepository } from '../../../src/files/domain/repositories/IFilesRepository'; -import { GetFileCitation } from '../../../src/files/domain/useCases/GetFileCitation'; +import {assert, createSandbox, SinonSandbox} from "sinon"; +import {DatasetNotNumberedVersion, ReadError} from "../../../src"; +import {IFilesRepository} from "../../../src/files/domain/repositories/IFilesRepository"; +import {GetFileCitation} from "../../../src/files/domain/useCases/GetFileCitation"; describe('execute', () => { - const sandbox: SinonSandbox = createSandbox(); - const testId = 1; + const sandbox: SinonSandbox = createSandbox(); + const testId = 1; - afterEach(() => { - sandbox.restore(); - }); + afterEach(() => { + sandbox.restore(); + }); - test('should return successful result with file citation on repository success', async () => { - const testCitation = 'test citation'; - const filesRepositoryStub = {}; - const getFileCitation = sandbox.stub().returns(testCitation); - filesRepositoryStub.getFileCitation = getFileCitation; + test('should return successful result with file citation on repository success', async () => { + const testCitation = 'test citation'; + const filesRepositoryStub = {}; + const getFileCitation = sandbox.stub().returns(testCitation); + filesRepositoryStub.getFileCitation = getFileCitation; - const sut = new GetFileCitation(filesRepositoryStub); + 
const sut = new GetFileCitation(filesRepositoryStub); - const actual = await sut.execute(testId); + const actual = await sut.execute(testId); - assert.match(actual, testCitation); - assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST_PUBLISHED, false); - }); + assert.match(actual, testCitation); + assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST, false); + }); - test('should return error result on repository error', async () => { - const filesRepositoryStub = {}; - const testReadError = new ReadError(); - filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); - const sut = new GetFileCitation(filesRepositoryStub); + test('should return error result on repository error', async () => { + const filesRepositoryStub = {}; + const testReadError = new ReadError(); + filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); + const sut = new GetFileCitation(filesRepositoryStub); - let actualError: ReadError = undefined; - await sut.execute(testId).catch((e) => (actualError = e)); + let actualError: ReadError = undefined; + await sut.execute(testId).catch((e) => (actualError = e)); - assert.match(actualError, testReadError); - }); -}); + assert.match(actualError, testReadError); + }); +}) \ No newline at end of file From e6f662997cc668bff2003001cc6dbed5c8529279 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Fri, 16 Feb 2024 18:26:02 +0100 Subject: [PATCH 81/96] fix: change env to point to unstable image --- test/integration/environment/.env | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/integration/environment/.env b/test/integration/environment/.env index fd0e469f..80e9a14e 100644 --- a/test/integration/environment/.env +++ b/test/integration/environment/.env @@ -1,6 +1,6 @@ POSTGRES_VERSION=13 DATAVERSE_DB_USER=dataverse SOLR_VERSION=9.3.0 -DATAVERSE_IMAGE_REGISTRY=ghcr.io -DATAVERSE_IMAGE_TAG=10240-file-citation 
+DATAVERSE_IMAGE_REGISTRY=docker.io +DATAVERSE_IMAGE_TAG=unstable DATAVERSE_BOOTSTRAP_TIMEOUT=5m From c846f8535ba13c71bc39cb2ff5f3da9574de54b3 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Mon, 19 Feb 2024 12:20:19 +0100 Subject: [PATCH 82/96] feat(Guide): add .npmrc instructions --- docs/installation.md | 55 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 52 insertions(+), 3 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index d3177635..f347c604 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -1,8 +1,8 @@ # Installation -## NPM +Recommended versions node >=16 and npm >=8. -### Stable version +## Getting Started with the Stable version A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse @@ -12,12 +12,61 @@ Install the package stable version using npm: npm install js-dataverse ``` -### Development versions +## Getting Started with the Development Version An unstable 2.x version of this package with breaking changes is under development. Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript + +### Create a `.npmrc` file and add a token + +To install the [@iqss/dataverse-client-javascript](https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript) +from the GitHub registry, follow these steps to create an `.npmrc` file in the root of your project using your GitHub token. + +1. **Create `.npmrc`** in your project's root directory. + + ```bash + touch .npmrc + ``` + +2. **Replace the Token** + + Open the newly created `.npmrc` file and replace `YOUR_GITHUB_TOKEN` with your actual GitHub token. + + ```plaintext + legacy-peer-deps=true + + //npm.pkg.github.com/:_authToken= + @iqss:registry=https://npm.pkg.github.com/ + ``` + +#### How to Get a GitHub Token + +If you don't have a GitHub token yet, follow these steps: + +1. 
Go to your GitHub account settings. + +2. Navigate to "Developer settings" -> "Personal access tokens." + +3. Click "Personal access tokens" -> "Tokens (classic)" -> "Generate new token (classic)". + +4. Give the token a name and select the "read:packages" scope. + +5. Copy the generated token. + +6. Replace `YOUR_GITHUB_AUTH_TOKEN` in the `.npmrc` file with the copied token. + +Now, you should be able to install the Dataverse JavaScript client using npm. + +### Install the package + +Install the package development version using npm: + +```bash +npm install @iqss/dataverse-client-javascript +``` + ## Initialization In order for the package to connect to the Dataverse API, there is an `APIConfig` object that should be initialized to set the preferred authentication mechanism with the associated credentials for connecting to the Dataverse API. From dbdbeaee421f215a79e7fea9958e9d4f92b09b67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:23:15 +0100 Subject: [PATCH 83/96] feat(Guide): re-write some lines for clarity Co-authored-by: Ellen Kraffmiller --- docs/installation.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation.md b/docs/installation.md index d3177635..97d73fb7 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -26,7 +26,7 @@ Currently, the supported authentication mechanisms are: - **API Key**: The recommended authentication mechanism. The API Key should correspond to a particular Dataverse user account. -- **Session Cookie**: This is an experimental feature primarily designed for Dataverse SPA development. It is necessary to enable the corresponding feature flag in the Dataverse installation to use this mechanism (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism and instead use API Key authentication. 
+- **Session Cookie**: This is an experimental feature primarily designed for Dataverse SPA development. To use this mechanism, you must enable the corresponding feature flag in the Dataverse installation (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism and instead use API Key authentication. It is recommended to globally initialize the `ApiConfig` object from the consuming application, as the configuration will be read on every API call made by the package's use cases. From 04b20d4b97c803cadf07a6f7bd936be929f0731f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:26:40 +0100 Subject: [PATCH 84/96] fix(Guides): wrong import in code example Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index ff12bae6..192e465e 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -49,7 +49,7 @@ Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given th ##### Example call: ```typescript -import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript'; +import { getDataset } from '@iqss/dataverse-client-javascript'; /* ... */ From e4135557989afa7a2c2ed732f194aee32a6282d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:28:06 +0100 Subject: [PATCH 85/96] fix(Guides): add optional `datasetVersionId` Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 192e465e..726a7d4b 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -67,7 +67,7 @@ _See [use case](../src/datasets/domain/useCases/GetDataset.ts)_ definition. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. 
-The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. From 37e0fb008200ab5ce5a61c9b2dd5251b5534fc54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:28:23 +0100 Subject: [PATCH 86/96] fix(Guides): optional `datasetVersionId` Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 726a7d4b..ccdc43ef 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -264,7 +264,7 @@ _See [use case](../src/files/domain/useCases/GetFile.ts)_ definition. The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. -The `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. 
#### Get File Counts in a Dataset From 4fe029ade62747cbb5bdb0718a80da607eed7ba2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:41:57 +0100 Subject: [PATCH 87/96] fix(Guides): add optional `datasetVersionId` to GetDatasetFileCounts Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index ccdc43ef..93a12904 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -295,7 +295,7 @@ getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: File _See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. - +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. An optional fourth parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to retrieve counts only for files that match the specified criteria. 
From 19559a0a6f316bb80a57b0ea25ab3f46388d5e4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:42:58 +0100 Subject: [PATCH 88/96] fix(Guides): add optional datasetVersionId to GetDatasetFilesTotalDownloadSize Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 93a12904..946de3c0 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -395,7 +395,7 @@ getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size _See [use case](../src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. - +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. There is a third optional parameter called `fileDownloadSizeMode` which receives an enum type of [FileDownloadSizeMode](../src/files/domain/models/FileDownloadSizeMode.ts), and applies a filter criteria to the operation. 
This parameter supports the following values: - `FileDownloadSizeMode.ALL` (Default): Includes both archival and original sizes for tabular files From 99afa32803c3cae26c580b726c88826e30f937d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Melina=20Hern=C3=A1ndez?= Date: Mon, 19 Feb 2024 12:43:26 +0100 Subject: [PATCH 89/96] fix(Guides): add optional datasetVersionId to GetDatasetFiles Co-authored-by: Ellen Kraffmiller --- docs/useCases.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 946de3c0..fc232ec5 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -484,7 +484,7 @@ getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) _See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) implementation_. The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. - +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. This use case supports the following optional parameters depending on the search goals: - **includeDeaccessioned**: (boolean) Indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. From dd4fbee369702f932be8435de3636ed81952c31d Mon Sep 17 00:00:00 2001 From: MellyGray Date: Mon, 19 Feb 2024 12:56:02 +0100 Subject: [PATCH 90/96] fix(Guides): add embargo duration returned values explanation --- docs/installation.md | 2 +- docs/useCases.md | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/installation.md b/docs/installation.md index 1b4aa4fc..df3efac0 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -2,7 +2,7 @@ Recommended versions node >=16 and npm >=8. 
-## Getting Started with the Stable version +## Getting Started with the Stable Version A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse diff --git a/docs/useCases.md b/docs/useCases.md index ccdc43ef..4432f294 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -593,7 +593,10 @@ _See [use case](../src/info/domain/useCases/GetDataverseVersion.ts) implementati #### Get Maximum Embargo Duration In Months -Returns a number indicating the configured maximum embargo duration in months. +Returns a number indicating the configured maximum embargo duration in months. For information on the possible values +that can be returned, please refer to the `MaxEmbargoDurationInMonths` property in the Dataverse documentation: +[MaxEmbargoDurationInMonths](https://guides.dataverse.org/en/latest/installation/config.html#maxembargodurationinmonths). + ##### Example call: From 0b358a70e5388d8da7c03d7ac45cf64feb7e88b8 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Mon, 19 Feb 2024 15:33:13 +0100 Subject: [PATCH 91/96] fix: replace turndown library by NodeHtmlMarkdown to fix nextjs incompatibility --- package-lock.json | 133 ++++++++++++++++++ package.json | 13 +- .../transformers/datasetTransformers.ts | 6 +- test/testHelpers/datasets/datasetHelper.ts | 8 +- 4 files changed, 145 insertions(+), 15 deletions(-) diff --git a/package-lock.json b/package-lock.json index d54b28f2..fed65deb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,7 @@ "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", "axios": "^1.3.4", + "node-html-markdown": "^1.3.0", "turndown": "^7.1.2", "typescript": "^4.9.5" }, @@ -2027,6 +2028,11 @@ "readable-stream": "^3.4.0" } }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" + }, 
"node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -2419,6 +2425,32 @@ "node": ">= 8" } }, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -2581,11 +2613,62 @@ "node": ">=6.0.0" } }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": 
"5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, "node_modules/domino": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/domino/-/domino-2.1.6.tgz", "integrity": "sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ==" }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, "node_modules/electron-to-chromium": { "version": "1.4.394", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.394.tgz", @@ -2619,6 +2702,17 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -3274,6 +3368,14 @@ "node": ">=8" } }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "bin": { + "he": "bin/he" + } 
+ }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -4547,6 +4649,26 @@ } } }, + "node_modules/node-html-markdown": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/node-html-markdown/-/node-html-markdown-1.3.0.tgz", + "integrity": "sha512-OeFi3QwC/cPjvVKZ114tzzu+YoR+v9UXW5RwSXGUqGb0qCl0DvP406tzdL7SFn8pZrMyzXoisfG2zcuF9+zw4g==", + "dependencies": { + "node-html-parser": "^6.1.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/node-html-parser": { + "version": "6.1.12", + "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.12.tgz", + "integrity": "sha512-/bT/Ncmv+fbMGX96XG9g05vFt43m/+SYKIs9oAemQVYyVcZmDAI2Xq/SbNcpOA35eF0Zk2av3Ksf+Xk8Vt8abA==", + "dependencies": { + "css-select": "^5.1.0", + "he": "1.2.0" + } + }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -4580,6 +4702,17 @@ "node": ">=8" } }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", diff --git a/package.json b/package.json index 3a9330ba..d02075f2 100644 --- a/package.json +++ b/package.json @@ -31,25 +31,26 @@ }, "homepage": "https://github.com/IQSS/dataverse-client-javascript#readme", "devDependencies": { + "@types/chai": "^4.3.4", "@types/jest": "^29.4.0", "@types/sinon": "^10.0.13", - "@types/chai": "^4.3.4", "@typescript-eslint/eslint-plugin": "^5.54.0", "@typescript-eslint/parser": "^5.52.0", + "chai": "^4.3.7", "eslint": "^8.35.0", 
"eslint-plugin-jest": "^27.2.1", "jest": "^29.4.3", - "chai": "^4.3.7", "prettier": "^2.8.4", - "ts-jest": "^29.0.5", "sinon": "^15.0.3", - "testcontainers": "^9.9.1" + "testcontainers": "^9.9.1", + "ts-jest": "^29.0.5" }, "dependencies": { "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", "axios": "^1.3.4", - "typescript": "^4.9.5", - "turndown": "^7.1.2" + "node-html-markdown": "^1.3.0", + "turndown": "^7.1.2", + "typescript": "^4.9.5" } } diff --git a/src/datasets/infra/repositories/transformers/datasetTransformers.ts b/src/datasets/infra/repositories/transformers/datasetTransformers.ts index 758e832d..b7e614bd 100644 --- a/src/datasets/infra/repositories/transformers/datasetTransformers.ts +++ b/src/datasets/infra/repositories/transformers/datasetTransformers.ts @@ -8,9 +8,7 @@ import { DatasetMetadataBlocks, } from '../../../domain/models/Dataset'; import { AxiosResponse } from 'axios'; -import TurndownService from 'turndown'; - -const turndownService = new TurndownService(); +import { NodeHtmlMarkdown } from 'node-html-markdown'; export const transformVersionResponseToDataset = (response: AxiosResponse): Dataset => { const versionPayload = response.data.data; @@ -104,5 +102,5 @@ const transformPayloadToDatasetMetadataFieldValue = (metadataFieldValuePayload: }; const transformHtmlToMarkdown = (source: string): string => { - return turndownService.turndown(source); + return NodeHtmlMarkdown.translate(source); }; diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index b257a0b8..8415fce6 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -1,10 +1,8 @@ import { Dataset, DatasetVersionState, DatasetLicense } from '../../../src/datasets/domain/models/Dataset'; -import TurndownService from 'turndown'; +import { NodeHtmlMarkdown } from 'node-html-markdown'; import axios, { AxiosResponse } from 'axios'; import { TestConstants } from '../TestConstants'; 
-const turndownService = new TurndownService(); - const DATASET_CREATE_TIME_STR = '2023-05-15T08:21:01Z'; const DATASET_UPDATE_TIME_STR = '2023-05-15T08:21:03Z'; const DATASET_RELEASE_TIME_STR = '2023-05-15T08:21:03Z'; @@ -50,7 +48,7 @@ export const createDatasetModel = (license?: DatasetLicense): Dataset => { subject: ['Subject1', 'Subject2'], dsDescription: [ { - dsDescriptionValue: turndownService.turndown(DATASET_HTML_DESCRIPTION), + dsDescriptionValue: NodeHtmlMarkdown.translate(DATASET_HTML_DESCRIPTION), }, ], datasetContact: [ @@ -185,7 +183,7 @@ export const createDatasetVersionPayload = (license?: DatasetLicense): any => { return datasetPayload; }; -export const createDatasetLicenseModel = (withIconUri: boolean = true): DatasetLicense => { +export const createDatasetLicenseModel = (withIconUri = true): DatasetLicense => { const datasetLicense: DatasetLicense = { name: 'CC0 1.0', uri: 'https://creativecommons.org/publicdomain/zero/1.0/', From a687c63a5a9063c0fe9af06afaa8cf1eb959c6e4 Mon Sep 17 00:00:00 2001 From: MellyGray Date: Mon, 19 Feb 2024 15:50:41 +0100 Subject: [PATCH 92/96] fix: FileAccessStatus enum values --- src/files/domain/models/FileCriteria.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/files/domain/models/FileCriteria.ts b/src/files/domain/models/FileCriteria.ts index 3285624a..5ad26aa1 100644 --- a/src/files/domain/models/FileCriteria.ts +++ b/src/files/domain/models/FileCriteria.ts @@ -70,6 +70,6 @@ export enum FileOrderCriteria { export enum FileAccessStatus { PUBLIC = 'Public', RESTRICTED = 'Restricted', - EMBARGOED = 'EmbargoedThenRestricted', - EMBARGOED_RESTRICTED = 'EmbargoedThenPublic', + EMBARGOED = 'EmbargoedThenPublic', + EMBARGOED_RESTRICTED = 'EmbargoedThenRestricted', } From ea8f2a8dde1ef4414b2a71bc27e6b321c81a4de4 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 21 Feb 2024 12:55:33 +0000 Subject: [PATCH 93/96] Added: use case docs for CreateDataset --- docs/useCases.md | 71 ++++++++++++++++++- 
src/datasets/domain/useCases/CreateDataset.ts | 10 +++ 2 files changed, 78 insertions(+), 3 deletions(-) diff --git a/docs/useCases.md b/docs/useCases.md index 1b3497e0..cff79904 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -18,6 +18,8 @@ The different use cases currently available in the package are classified below, - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) - [List All Datasets](#list-all-datasets) + - [Datasets write use cases](#datasets-write-use-cases) + - [Create a Dataset](#create-a-dataset) - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get a File](#get-a-file) @@ -235,6 +237,70 @@ Note that `limit` and `offset` are optional parameters for pagination. The `DatasetPreviewSubset`returned instance contains a property called `totalDatasetCount` which is necessary for pagination. +### Datasets Write Use Cases + +#### Create a Dataset + +Creates a new Dataset in a collection, given a [NewDatasetDTO](../src/datasets/domain/dtos/NewDatasetDTO.ts) object and an optional collection identifier, which defaults to `root`. + +This use case validates the submitted fields of each metadata block and can return errors of type [ResourceValidationError](../src/core/domain/useCases/validators/errors/ResourceValidationError.ts), which include sufficient information to determine which field value is invalid and why. + +##### Example call: + +```typescript +import { createDataset } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const newDatasetDTO: NewDatasetDTO = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'New Dataset', + author: [ + { + authorName: 'John Doe', + authorAffiliation: 'Dataverse', + }, + { + authorName: 'John Lee', + authorAffiliation: 'Dataverse', + }, + ], + datasetContact: [ + { + datasetContactEmail: 'johndoe@dataverse.com', + datasetContactName: 'John', + }, + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of our new dataset', + }, + ], + subject: 'Earth and Environmental Sciences', + + /* Rest of field values... */ + + }, + }, + ], +}; + +createDataset.execute(newDatasetDTO).then((newDatasetIds: CreatedDatasetIdentifiers) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/CreateDataset.ts) implementation_. + +The above example creates the new dataset in the `root` collection since no collection identifier is specified. If you want to create the dataset in a different collection, you must add the collection identifier as a second parameter in the use case call. + +The use case returns a [CreatedDatasetIdentifiers](../src/datasets/domain/models/CreatedDatasetIdentifiers.ts) object, which includes the persistent and numeric identifiers of the created dataset. + ## Files ### Files read use cases @@ -593,11 +659,10 @@ _See [use case](../src/info/domain/useCases/GetDataverseVersion.ts) implementati #### Get Maximum Embargo Duration In Months -Returns a number indicating the configured maximum embargo duration in months. For information on the possible values -that can be returned, please refer to the `MaxEmbargoDurationInMonths` property in the Dataverse documentation: +Returns a number indicating the configured maximum embargo duration in months. 
For information on the possible values +that can be returned, please refer to the `MaxEmbargoDurationInMonths` property in the Dataverse documentation: [MaxEmbargoDurationInMonths](https://guides.dataverse.org/en/latest/installation/config.html#maxembargodurationinmonths). - ##### Example call: ```typescript diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts index ca352d82..4b71167f 100644 --- a/src/datasets/domain/useCases/CreateDataset.ts +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -21,6 +21,16 @@ export class CreateDataset implements UseCase { this.newDatasetValidator = newDatasetValidator; } + /** + * Creates a new Dataset in a collection, given a NewDatasetDTO object and an optional collection identifier, which defaults to root. + * + * @param {NewDatasetDTO} [newDataset] - NewDatasetDTO object including the new dataset metadata field values for each metadata block. + * @param {string} [collectionId] - Specifies the collection identifier where the new dataset should be created (optional, defaults to root). + * @returns {Promise} + * @throws {ResourceValidationError} - If there are validation errors related to the provided information. + * @throws {ReadError} - If there are errors while reading data. + * @throws {WriteError} - If there are errors while writing data. 
+ */ async execute(newDataset: NewDatasetDTO, collectionId: string = 'root'): Promise { const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); this.newDatasetValidator.validate(newDataset, metadataBlocks); From 3e71ba9b5ac2de7c0cb34dacfef536fcf3690613 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 21 Feb 2024 13:00:39 +0000 Subject: [PATCH 94/96] Fixed: GetDatasetCitation method docs --- src/datasets/domain/useCases/GetDatasetCitation.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/datasets/domain/useCases/GetDatasetCitation.ts b/src/datasets/domain/useCases/GetDatasetCitation.ts index fd158f84..11dbb089 100644 --- a/src/datasets/domain/useCases/GetDatasetCitation.ts +++ b/src/datasets/domain/useCases/GetDatasetCitation.ts @@ -15,7 +15,7 @@ export class GetDatasetCitation implements UseCase { * @param {number} [datasetId] - The dataset identifier. * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. 
The default value is false - * @returns {Promise} + * @returns {Promise} */ async execute( datasetId: number, From c20cd3b0bd4da8346715036deebb687fbc34d233 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 21 Feb 2024 13:01:43 +0000 Subject: [PATCH 95/96] Added: use case docs for GetFileCitation --- docs/useCases.md | 26 ++++++++++++++++++++ src/files/domain/useCases/GetFileCitation.ts | 8 ++++++ 2 files changed, 34 insertions(+) diff --git a/docs/useCases.md b/docs/useCases.md index cff79904..850ab448 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -23,6 +23,7 @@ The different use cases currently available in the package are classified below, - [Files](#Files) - [Files read use cases](#files-read-use-cases) - [Get a File](#get-a-file) + - [Get File Citation Text](#get-file-citation-text) - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) - [Get File Data Tables](#get-file-data-tables) - [Get File Download Count](#get-file-download-count) @@ -332,6 +333,31 @@ The `fileId` parameter can be a string, for persistent identifiers, or a number, The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +#### Get File Citation Text + +Returns the File citation text. + +##### Example call: + +```typescript +import { getFileCitation } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 3; +const datasetVersionId = '1.0'; + +getFileCitation.execute(fileId, datasetVersionId).then((citationText: string) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileCitation.ts) implementation_. + +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the file search. 
If not set, the default value is `false`. + #### Get File Counts in a Dataset Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts index 2daccc1d..8c488662 100644 --- a/src/files/domain/useCases/GetFileCitation.ts +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -9,6 +9,14 @@ export class GetFileCitation implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns the File citation text. + * + * @param {number} [fileId] - The dataset identifier. + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. 
The default value is false + * @returns {Promise} + */ async execute( fileId: number, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, From c75139ec3c787d3d4dc1d450266ada9b13e74352 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 21 Feb 2024 13:04:19 +0000 Subject: [PATCH 96/96] Added: doc tweaks for GetFileCitation --- docs/useCases.md | 2 ++ src/files/domain/repositories/IFilesRepository.ts | 1 + src/files/domain/useCases/GetFileCitation.ts | 2 +- 3 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/useCases.md b/docs/useCases.md index 850ab448..b9812185 100644 --- a/docs/useCases.md +++ b/docs/useCases.md @@ -356,6 +356,8 @@ getFileCitation.execute(fileId, datasetVersionId).then((citationText: string) => _See [use case](../src/files/domain/useCases/GetFileCitation.ts) implementation_. +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the file search. If not set, the default value is `false`. 
#### Get File Counts in a Dataset diff --git a/src/files/domain/repositories/IFilesRepository.ts b/src/files/domain/repositories/IFilesRepository.ts index efd38b2d..e3f16127 100644 --- a/src/files/domain/repositories/IFilesRepository.ts +++ b/src/files/domain/repositories/IFilesRepository.ts @@ -39,5 +39,6 @@ export interface IFilesRepository { getFileDataTables(fileId: number | string): Promise; getFile(fileId: number | string, datasetVersionId: string): Promise; + getFileCitation(fileId: number | string, datasetVersionId: string, includeDeaccessioned: boolean): Promise; } diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts index 8c488662..196968f0 100644 --- a/src/files/domain/useCases/GetFileCitation.ts +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -12,7 +12,7 @@ export class GetFileCitation implements UseCase { /** * Returns the File citation text. * - * @param {number} [fileId] - The dataset identifier. + * @param {number | string} [fileId] - The File identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false * @returns {Promise}