diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d9c3ee2b..432eb098 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -3,31 +3,39 @@ First of all thank you very much for your interest in contributing to this project! ## Getting started -1. Make sure that you have installed the project dependencies -2. Build the project as explained in [README.md](README.md) -3. Fork the repository -4. Apply changes in your own branch -5. Create a pull request that we will review -6. Update README.md if necessary + +1. Fork the repository and clone your fork locally +2. Follow the [Local Development](./docs/localDevelopment.md) guide for setting up your local development environment +3. Create a branch and apply the desired changes on it +4. Create a pull request from your fork branch targeting the develop branch of the root repository ## Checklist before creating PR + - Project builds - Lint and format checks pass -- Unit tests pass -- Unit tests for new functionality/fix are added +- Unit and integration tests pass +- Unit and integration tests for new functionality/fix are added +- Documentation is updated (Any new use case added or modified should be documented in the [Use Cases](./docs/useCases.md) section) ## Code of Conduct + We abide by the upstream Code of Conduct at https://github.com/IQSS/dataverse/blob/develop/CODE_OF_CONDUCT.md and in addition ask the following. 
### Git + - Branch names are self descriptive - Commit messages are short and concise - Branch is put up to date before creating PR ### Our responsibilities + - To keep the code clean - To provide constructive feedback to other developers - To maintain readable code at all times ## Getting help -Please feel free to reach out in https://chat.dataverse.org or https://groups.google.com/g/dataverse-dev \ No newline at end of file + +Please, do not hesitate to contact us through: + +- Zulip: https://dataverse.zulipchat.com/#narrow/stream/410361-ui-dev +- Google Group: https://groups.google.com/g/dataverse-dev diff --git a/README.md b/README.md index 85e21130..c88d7b79 100644 --- a/README.md +++ b/README.md @@ -1,85 +1,11 @@ -# dataverse-client-javascript +# js-dataverse [![npm](https://img.shields.io/npm/v/js-dataverse.svg)](https://www.npmjs.com/package/js-dataverse) A JavaScript/TypeScript API wrapper for [Dataverse](http://guides.dataverse.org/en/latest/api/). -## NPM - -A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse - -An unstable 2.x version of this package with breaking changes is under development. Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript - -## Getting Started - -This package is built using `node v19`, so it is recommended to use that version. - -Make sure that you install all the project dependencies: - -`npm install` - -## Build project - -In order to build the project, we need to run the following command: - -`npm run build` - -the build generated will be placed in `dist` folder. - -## Tests - -### Run all tests - -`npm run test` - -### Run unit tests - -`npm run test:unit` - -### Run integration tests - -`npm run test:integration` - -#### Configure the integration testing environment - -The integration testing environment is implemented with Test Containers and Docker Compose. 
The environment uses different environment variables, defined in a .env file, available in the _test/integration/environment_ folder. - -These environment variables can be updated as needed for integration testing. For example, we can specify the Dataverse image registry and tag, to point to the particular Dataverse image to test. - -- To test images generated in Dataverse PRs: Set `ghcr.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and the source branch name of a particular PR as the image tag (DATAVERSE_IMAGE_TAG). - -- To test the Dataverse develop branch: Set `docker.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and `unstable` as the image tag (DATAVERSE_IMAGE_TAG). - -### Run test coverage - -`npm run test:coverage` - -## Format and lint - -### Run formatter - -`npm run format` - -### Run linter - -Running a linting check on the code: - -`npm run lint` - -Fix linting checks on the code: - -`npm run lint:fix` - -## Publishing new version - -Automated publishing of versions could be automated when merging to master. Below are the steps that would be required to publish a new version: - -1. Run tests and checks -2. Build the project -3. Commit changes -4. Upgrade npm version -5. Publish, `npm publish` - -## Contributing - -We love contributors! Please see [CONTRIBUTING.md](CONTRIBUTING.md). +- [Installation](./docs/installation.md) +- [Use Cases](./docs/useCases.md) +- [Local Development](./docs/localDevelopment.md) +- [Contributing](./CONTRIBUTING.md) +- [License](./LICENSE) diff --git a/docs/installation.md b/docs/installation.md new file mode 100644 index 00000000..df3efac0 --- /dev/null +++ b/docs/installation.md @@ -0,0 +1,108 @@ +# Installation + +Recommended versions node >=16 and npm >=8. 
+ +## Getting Started with the Stable Version + +A stable 1.x version of this package is available as `js-dataverse` at https://www.npmjs.com/package/js-dataverse + +Install the package stable version using npm: + +```bash +npm install js-dataverse +``` + +## Getting Started with the Development Version + +An unstable 2.x version of this package with breaking changes is under development. + +Until a 2.0 version is officially released, it can be installed from https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript + + +### Create a `.npmrc` file and add a token + +To install the [@iqss/dataverse-client-javascript](https://github.com/IQSS/dataverse-client-javascript/pkgs/npm/dataverse-client-javascript) +from the GitHub registry, follow these steps to create an `.npmrc` file in the root of your project using your GitHub token. + +1. **Create `.npmrc`** in your project's root directory. + + ```bash + touch .npmrc + ``` + +2. **Replace the Token** + + Open the newly created `.npmrc` file and replace `YOUR_GITHUB_TOKEN` with your actual GitHub token. + + ```plaintext + legacy-peer-deps=true + + //npm.pkg.github.com/:_authToken=YOUR_GITHUB_TOKEN + @iqss:registry=https://npm.pkg.github.com/ + ``` + +#### How to Get a GitHub Token + +If you don't have a GitHub token yet, follow these steps: + +1. Go to your GitHub account settings. + +2. Navigate to "Developer settings" -> "Personal access tokens." + +3. Click "Personal access tokens" -> "Tokens (classic)" -> "Generate new token (classic)". + +4. Give the token a name and select the "read:packages" scope. + +5. Copy the generated token. + +6. Replace `YOUR_GITHUB_TOKEN` in the `.npmrc` file with the copied token. + +Now, you should be able to install the Dataverse JavaScript client using npm. 
+ +### Install the package + +Install the package development version using npm: + +```bash +npm install @iqss/dataverse-client-javascript +``` + +## Initialization + +In order for the package to connect to the Dataverse API, there is an `ApiConfig` object that should be initialized to set the preferred authentication mechanism with the associated credentials for connecting to the Dataverse API. + +Currently, the supported authentication mechanisms are: + +- **API Key**: The recommended authentication mechanism. The API Key should correspond to a particular Dataverse user account. + +- **Session Cookie**: This is an experimental feature primarily designed for Dataverse SPA development. To use this mechanism, you must enable the corresponding feature flag in the Dataverse installation (See https://guides.dataverse.org/en/latest/installation/config.html?#feature-flags). It is recommended not to use this mechanism and instead use API Key authentication. + +It is recommended to globally initialize the `ApiConfig` object from the consuming application, as the configuration will be read on every API call made by the package's use cases. + +For example, in a React application, we can globally initialize the `ApiConfig` object in the `App` file, like this: + +```typescript +ApiConfig.init(<DATAVERSE_API_URL>, DataverseApiAuthMechanism.API_KEY, <DATAVERSE_API_KEY>) + +function App() { + /* Your App code */ +} + +export default App +``` + +The same example but with example values set: + +```typescript +ApiConfig.init('http://localhost:8000/api/v1', DataverseApiAuthMechanism.API_KEY, 'xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx') + +function App() { + /* Your App code */ +} + +export default App +``` + +We can initialize the `ApiConfig` object as an unauthenticated user, by setting `undefined` as the API Key value. + +This will allow use cases that do not require authentication to be successfully executed, but those that do require authentication will fail. 
diff --git a/docs/localDevelopment.md b/docs/localDevelopment.md new file mode 100644 index 00000000..9527a778 --- /dev/null +++ b/docs/localDevelopment.md @@ -0,0 +1,95 @@ +# Local Development + +To set up your local development environment for working on this project, follow these steps: + +## Prerequisites + +### Node.js and npm + +Make sure you have Node.js and npm installed on your machine. + +This package is built using `node v19`, so it is recommended to use that version. + +### Docker and Docker Compose + +We use [Test Containers](https://github.com/testcontainers/testcontainers-node) for running integration tests. + +In our Test Containers setup we use Docker Compose, as our tests involve multiple containers that need to be orchestrated together. + +If you want to run integration tests, you need Docker and Docker Compose installed on your machine. + +## Install Dependencies + +Make sure that you install all the project dependencies: + +```bash +npm install +``` + +## Build + +In order to build the project, we need to run the following command: + +```bash +npm run build +``` + +the build generated will be placed in `dist` folder. + +## Tests + +### Run all tests + +```bash +npm run test +``` + +### Run unit tests + +```bash +npm run test:unit +``` + +### Run integration tests + +```bash +npm run test:integration +``` + +#### Configure the integration testing environment + +The integration testing environment uses different environment variables, defined in a .env file, available in the _test/integration/environment_ folder. + +These environment variables can be updated as needed for integration testing. For example, we can specify the Dataverse image registry and tag, to point to the particular Dataverse image to test. + +- To test images generated in Dataverse PRs: Set `ghcr.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and the source branch name of a particular PR as the image tag (DATAVERSE_IMAGE_TAG). 
+ +- To test the Dataverse develop branch: Set `docker.io` as the image registry (DATAVERSE_IMAGE_REGISTRY) and `unstable` as the image tag (DATAVERSE_IMAGE_TAG). + +### Run test coverage + +```bash +npm run test:coverage +``` + +## Format and lint + +### Run formatter + +```bash +npm run format +``` + +### Run linter + +Running a linting check on the code: + +```bash +npm run lint +``` + +Fix linting checks on the code: + +```bash +npm run lint:fix +``` \ No newline at end of file diff --git a/docs/useCases.md b/docs/useCases.md new file mode 100644 index 00000000..ebe0aee5 --- /dev/null +++ b/docs/useCases.md @@ -0,0 +1,731 @@ +# Use Cases + +In the context of [Domain-Driven Design (DDD)](https://martinfowler.com/bliki/DomainDrivenDesign.html), a use case is a specific way to describe and capture a user's or system's interaction with the domain to achieve a particular goal. + +This package exposes the functionality in the form of use cases, with the main goal that any package consumer can easily identify the desired functionality. 
+ +The different use cases currently available in the package are classified below, according to the subdomains they target: + +## Table of Contents + +- [Datasets](#Datasets) + - [Datasets read use cases](#datasets-read-use-cases) + - [Get a Dataset](#get-a-dataset) + - [Get Dataset By Private URL Token](#get-dataset-by-private-url-token) + - [Get Dataset Citation Text](#get-dataset-citation-text) + - [Get Dataset Citation Text By Private URL Token](#get-dataset-citation-text-by-private-url-token) + - [Get Dataset Locks](#get-dataset-locks) + - [Get Dataset Summary Field Names](#get-dataset-summary-field-names) + - [Get User Permissions on a Dataset](#get-user-permissions-on-a-dataset) + - [List All Datasets](#list-all-datasets) + - [Datasets write use cases](#datasets-write-use-cases) + - [Create a Dataset](#create-a-dataset) +- [Files](#Files) + - [Files read use cases](#files-read-use-cases) + - [Get a File](#get-a-file) + - [Get File Citation Text](#get-file-citation-text) + - [Get File Counts in a Dataset](#get-file-counts-in-a-dataset) + - [Get File Data Tables](#get-file-data-tables) + - [Get File Download Count](#get-file-download-count) + - [Get the size of Downloading all the files of a Dataset Version](#get-the-size-of-downloading-all-the-files-of-a-dataset-version) + - [Get User Permissions on a File](#get-user-permissions-on-a-file) + - [List Files in a Dataset](#list-files-in-a-dataset) +- [Metadata Blocks](#metadata-blocks) + - [Metadata Blocks read use cases](#metadata-blocks-read-use-cases) + - [Get Metadata Block By Name](#get-metadata-block-by-name) +- [Users](#Users) + - [Users read use cases](#users-read-use-cases) + - [Get Current Authenticated User](#get-current-authenticated-user) +- [Info](#Info) + - [Get Dataverse Backend Version](#get-dataverse-backend-version) + - [Get Maximum Embargo Duration In Months](#get-maximum-embargo-duration-in-months) + - [Get ZIP Download Limit](#get-zip-download-limit) + +## Datasets + +### Datasets Read Use 
Cases + +#### Get a Dataset + +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given the search parameters to identify it. + +##### Example call: + +```typescript +import { getDataset } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; +const datasetVersionId = '1.0'; + +getDataset.execute(datasetId, datasetVersionId).then((dataset: Dataset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDataset.ts)_ definition. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. + +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +#### Get Dataset By Private URL Token + +Returns a [Dataset](../src/datasets/domain/models/Dataset.ts) instance, given an associated Private URL Token. + +```typescript +import { getPrivateUrlDataset } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const token = 'a56444bc-7697-4711-8964-e0577f055fd2'; + +getPrivateUrlDataset.execute(token).then((dataset: Dataset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDataset.ts)_ definition. + +#### Get Dataset Citation Text + +Returns the Dataset citation text. + +##### Example call: + +```typescript +import { getDatasetCitation } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId = 2; +const datasetVersionId = '1.0'; + +getDatasetCitation.execute(datasetId, datasetVersionId).then((citationText: string) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetCitation.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +#### Get Dataset Citation Text By Private URL Token + +Returns the Dataset citation text, given an associated Private URL Token. + +##### Example call: + +```typescript +import { getPrivateUrlDatasetCitation } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const token = 'a56444bc-7697-4711-8964-e0577f055fd2'; + +getPrivateUrlDatasetCitation.execute(token).then((citationText: string) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts) implementation_. + +#### Get Dataset Locks + +Returns a [DatasetLock](../src/datasets/domain/models/DatasetLock.ts) array of all locks present in a Dataset. + +##### Example call: + +```typescript +import { getDatasetLocks } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; + +getDatasetLocks.execute(datasetId).then((datasetLocks: DatasetLock[]) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetLocks.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### Get Dataset Summary Field Names + +Returns the names of the dataset summary fields configured in the installation. 
+ +##### Example call: + +```typescript +import { getDatasetSummaryFieldNames } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getDatasetSummaryFieldNames.execute().then((names: string[]) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts) implementation_. + +#### Get User Permissions on a Dataset + +Returns an instance of [DatasetUserPermissions](../src/datasets/domain/models/DatasetUserPermissions.ts) that includes the permissions that the calling user has on a particular Dataset. + +##### Example call: + +```typescript +import { getDatasetUserPermissions } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 'doi:10.77777/FK2/AAAAAA'; + +getDatasetUserPermissions.execute(datasetId).then((permissions: DatasetUserPermissions) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetDatasetUserPermissions.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### List All Datasets + +Returns an instance of [DatasetPreviewSubset](../src/datasets/domain/models/DatasetPreviewSubset.ts) that contains reduced information for each dataset that the calling user can access in the installation. + +##### Example call: + +```typescript +import { getAllDatasetPreviews } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const limit = 10; +const offset = 20; +const collectionId = 'subcollection1'; + +getAllDatasetPreviews.execute(limit, offset, collectionId).then((subset: DatasetPreviewSubset) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/GetAllDatasetPreviews.ts) implementation_. + +Note that `limit` and `offset` are optional parameters for pagination. + +Note that `collectionId` is an optional parameter to filter datasets by collection. If not set, the default value is `root`. 
+ +The `DatasetPreviewSubset` returned instance contains a property called `totalDatasetCount` which is necessary for pagination. + +### Datasets Write Use Cases + +#### Create a Dataset + +Creates a new Dataset in a collection, given a [NewDatasetDTO](../src/datasets/domain/dtos/NewDatasetDTO.ts) object and an optional collection identifier, which defaults to `root`. + +This use case validates the submitted fields of each metadata block and can return errors of type [ResourceValidationError](../src/core/domain/useCases/validators/errors/ResourceValidationError.ts), which include sufficient information to determine which field value is invalid and why. + +##### Example call: + +```typescript +import { createDataset } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const newDatasetDTO: NewDatasetDTO = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'New Dataset', + author: [ + { + authorName: 'John Doe', + authorAffiliation: 'Dataverse', + }, + { + authorName: 'John Lee', + authorAffiliation: 'Dataverse', + }, + ], + datasetContact: [ + { + datasetContactEmail: 'johndoe@dataverse.com', + datasetContactName: 'John', + }, + ], + dsDescription: [ + { + dsDescriptionValue: 'This is the description of our new dataset', + }, + ], + subject: 'Earth and Environmental Sciences', + + /* Rest of field values... */ + + }, + }, + ], +}; + +createDataset.execute(newDatasetDTO).then((newDatasetIds: CreatedDatasetIdentifiers) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/datasets/domain/useCases/CreateDataset.ts) implementation_. + +The above example creates the new dataset in the `root` collection since no collection identifier is specified. If you want to create the dataset in a different collection, you must add the collection identifier as a second parameter in the use case call. 
+ +The use case returns a [CreatedDatasetIdentifiers](../src/datasets/domain/models/CreatedDatasetIdentifiers.ts) object, which includes the persistent and numeric identifiers of the created dataset. + +## Files + +### Files read use cases + +#### Get a File + +Returns a [File](../src/files/domain/models/File.ts) instance, given the search parameters to identify it. + +##### Example call: + +```typescript +import { getFile } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 2; +const datasetVersionId = '1.0'; + +getFile.execute(fileId, datasetVersionId).then((file: File) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFile.ts)_ definition. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. + +#### Get File Citation Text + +Returns the File citation text. + +##### Example call: + +```typescript +import { getFileCitation } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 3; +const datasetVersionId = '1.0'; + +getFileCitation.execute(fileId, datasetVersionId).then((citationText: string) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileCitation.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the file search. If not set, the default value is `false`. 
+ +#### Get File Counts in a Dataset + +Returns an instance of [FileCounts](../src/files/domain/models/FileCounts.ts), containing the requested Dataset total file count, as well as file counts for the following file properties: + +- **Per content type** +- **Per category name** +- **Per tabular tag name** +- **Per access status** (Possible values: _Public_, _Restricted_, _EmbargoedThenRestricted_, _EmbargoedThenPublic_) + +##### Example call: + +```typescript +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 2; +const datasetVersionId = '1.0'; + +getDatasetFileCounts.execute(datasetId, datasetVersionId).then((fileCounts: FileCounts) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetDatasetFileCounts.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +There is an optional third parameter called `includeDeaccessioned`, which indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +An optional fourth parameter `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to retrieve counts only for files that match the specified criteria. + +##### Example call using optional parameters: + +```typescript +import { getDatasetFileCounts } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const includeDeaccessioned: boolean = true; +const searchCriteria: FileSearchCriteria = { + categoryName: 'physics', +}; + +getDatasetFileCounts + .execute(datasetId, datasetVersionId, includeDeaccessioned, searchCriteria) + .then((fileCounts: FileCounts) => { + /* ... */ + }); + +/* ... */ +``` + +#### Get File Data Tables + +This use case is oriented toward tabular files and provides an array of [FileDataTable](../src/files/domain/models/FileDataTable.ts) objects for an existing tabular file. + +##### Example call: + +```typescript +import { getFileDataTables } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId = 2; + +getFileDataTables.execute(fileId).then((dataTables: FileDataTable[]) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileDataTables.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### Get File Download Count + +Provides the download count for a particular File. + +##### Example call: + +```typescript +import { getFileDownloadCount } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId: number = 2; + +getFileDownloadCount.execute(fileId).then((count: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileDownloadCount.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### Get the size of Downloading all the files of a Dataset Version + +Returns the combined size in bytes of all the files available for download from a particular Dataset. + +##### Example call: + +```typescript +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; + +getDatasetFilesTotalDownloadSize.execute(datasetId, datasetVersionId).then((size: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +There is a third optional parameter called `fileDownloadSizeMode` which receives an enum type of [FileDownloadSizeMode](../src/files/domain/models/FileDownloadSizeMode.ts), and applies a filter criteria to the operation. This parameter supports the following values: + +- `FileDownloadSizeMode.ALL` (Default): Includes both archival and original sizes for tabular files +- `FileDownloadSizeMode.ARCHIVAL`: Includes only the archival size for tabular files +- `FileDownloadSizeMode.ORIGINAL`: Includes only the original size for tabular files + +An optional fourth parameter called `fileSearchCriteria` receives a [FileSearchCriteria](../src/files/domain/models/FileCriteria.ts) object to only consider files that match the specified criteria. + +An optional fifth parameter called `includeDeaccessioned` indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. + +##### Example call using optional parameters: + +```typescript +import { getDatasetFilesTotalDownloadSize } from '@iqss/dataverse-client-javascript'; + +/* ... 
*/ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const mode: FileDownloadSizeMode = FileDownloadSizeMode.ARCHIVAL; +const searchCriteria: FileSearchCriteria = { + categoryName: 'physics', +}; +const includeDeaccessioned: boolean = true; + +getDatasetFilesTotalDownloadSize + .execute(datasetId, datasetVersionId, mode, searchCriteria, includeDeaccessioned) + .then((size: number) => { + /* ... */ + }); + +/* ... */ +``` + +#### Get User Permissions on a File + +This use case returns a [FileUserPermissions](../src/files/domain/models/FileUserPermissions.ts) object, which includes the permissions that the calling user has on a particular File. + +The returned _FileUserPermissions_ object contains the following permissions, as booleans: + +- Can download the file (_canDownloadFile_) +- Can manage the file permissions (_canManageFilePermissions_) +- Can edit the file owner dataset (_canEditOwnerDataset_) + +##### Example call: + +```typescript +import { getFileUserPermissions } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const fileId: number = 2; + +getFileUserPermissions.execute(fileId).then((permissions: FileUserPermissions) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/files/domain/useCases/GetFileUserPermissions.ts) implementation_. + +The `fileId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. + +#### List Files in a Dataset + +Returns an instance of [FilesSubset](../src/files/domain/models/FilesSubset.ts), which contains the files from the requested Dataset and page (if pagination parameters are set). + +##### Example call: + +```typescript +import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId = 2; +const datasetVersionId = '1.0'; + +getDatasetFiles.execute(datasetId, datasetVersionId).then((subset: FilesSubset) => { + /* ... */ +}); + +/* ... 
*/ +``` + +_See [use case](../src/files/domain/useCases/GetDatasetFiles.ts) implementation_. + +The `datasetId` parameter can be a string, for persistent identifiers, or a number, for numeric identifiers. +The optional `datasetVersionId` parameter can correspond to a numeric version identifier, as in the previous example, or a [DatasetNotNumberedVersion](../src/datasets/domain/models/DatasetNotNumberedVersion.ts) enum value. If not set, the default value is `DatasetNotNumberedVersion.LATEST`. +This use case supports the following optional parameters depending on the search goals: + +- **includeDeaccessioned**: (boolean) Indicates whether to consider deaccessioned versions or not in the dataset search. If not set, the default value is `false`. +- **limit**: (number) Limit for pagination. +- **offset**: (number) Offset for pagination. +- **fileSearchCriteria**: ([FileSearchCriteria](../src/files/domain/models/FileCriteria.ts)) Supports filtering the files by different file properties. +- **fileOrderCriteria**: ([FileOrderCriteria](../src/files/domain/models/FileCriteria.ts)) Supports ordering the results according to different criteria. If not set, the default value is `FileOrderCriteria.NAME_AZ`. + +##### Example call using optional parameters: + +```typescript +import { getDatasetFiles } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const datasetId: number = 2; +const datasetVersionId: string = '1.0'; +const includeDeaccessioned: boolean = true; +const limit: number = 10; +const offset: number = 20; +const searchCriteria: FileSearchCriteria = { + searchText: 'file title', +}; +const orderCriteria: FileOrderCriteria = FileOrderCriteria.NEWEST; + +getDatasetFiles + .execute(datasetId, datasetVersionId, includeDeaccessioned, limit, offset, searchCriteria, orderCriteria) + .then((subset: FilesSubset) => { + /* ... */ + }); + +/* ... 
*/ +``` + +## Metadata Blocks + +### Metadata Blocks read use cases + +#### Get Metadata Block By Name + +Returns a [MetadataBlock](../src/metadataBlocks/domain/models/MetadataBlock.ts) instance, given its name. + +##### Example call: + +```typescript +import { getMetadataBlockByName } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +const name = 'citation'; + +getMetadataBlockByName.execute(name).then((metadataBlock: MetadataBlock) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts) implementation_. + +## Users + +### Users read use cases + +#### Get Current Authenticated User + +Returns the current [AuthenticatedUser](../src/users/domain/models/AuthenticatedUser.ts) corresponding to the authentication mechanism provided through `ApiConfig`. + +##### Example call: + +```typescript +import { getCurrentAuthenticatedUser } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getCurrentAuthenticatedUser.execute().then((user: AuthenticatedUser) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/users/domain/useCases/GetCurrentAuthenticatedUser.ts) implementation_. + +## Info + +#### Get Dataverse Backend Version + +Returns a [DataverseVersion](../src/info/domain/models/DataverseVersion.ts) object, which contains version information for the Dataverse backend installation. + +##### Example call: + +```typescript +import { getDataverseVersion } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getDataverseVersion.execute().then((version: DataverseVersion) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/info/domain/useCases/GetDataverseVersion.ts) implementation_. + +#### Get Maximum Embargo Duration In Months + +Returns a number indicating the configured maximum embargo duration in months. 
For information on the possible values +that can be returned, please refer to the `MaxEmbargoDurationInMonths` property in the Dataverse documentation: +[MaxEmbargoDurationInMonths](https://guides.dataverse.org/en/latest/installation/config.html#maxembargodurationinmonths). + +##### Example call: + +```typescript +import { getMaxEmbargoDurationInMonths } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getMaxEmbargoDurationInMonths.execute().then((months: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts) implementation_. + +#### Get ZIP Download Limit + +Returns a number indicating the configured ZIP download limit in bytes. + +##### Example call: + +```typescript +import { getZipDownloadLimit } from '@iqss/dataverse-client-javascript'; + +/* ... */ + +getZipDownloadLimit.execute().then((downloadLimit: number) => { + /* ... */ +}); + +/* ... */ +``` + +_See [use case](../src/info/domain/useCases/GetZipDownloadLimit.ts) implementation_. 
diff --git a/package-lock.json b/package-lock.json index d54b28f2..fed65deb 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,6 +12,7 @@ "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", "axios": "^1.3.4", + "node-html-markdown": "^1.3.0", "turndown": "^7.1.2", "typescript": "^4.9.5" }, @@ -2027,6 +2028,11 @@ "readable-stream": "^3.4.0" } }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==" + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -2419,6 +2425,32 @@ "node": ">= 8" } }, + "node_modules/css-select": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.1.0.tgz", + "integrity": "sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==", + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==", + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, "node_modules/debug": { "version": "4.3.4", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", @@ -2581,11 +2613,62 @@ "node": ">=6.0.0" } }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": 
"sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, "node_modules/domino": { "version": "2.1.6", "resolved": "https://registry.npmjs.org/domino/-/domino-2.1.6.tgz", "integrity": "sha512-3VdM/SXBZX2omc9JF9nOPCtDaYQ67BGp5CoLpIQlO2KCAPETs8TcDHacF26jXadGbvUteZzRTeos2fhID5+ucQ==" }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, "node_modules/electron-to-chromium": { "version": "1.4.394", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.394.tgz", @@ -2619,6 +2702,17 @@ "once": "^1.4.0" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": 
"https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -3274,6 +3368,14 @@ "node": ">=8" } }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "bin": { + "he": "bin/he" + } + }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -4547,6 +4649,26 @@ } } }, + "node_modules/node-html-markdown": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/node-html-markdown/-/node-html-markdown-1.3.0.tgz", + "integrity": "sha512-OeFi3QwC/cPjvVKZ114tzzu+YoR+v9UXW5RwSXGUqGb0qCl0DvP406tzdL7SFn8pZrMyzXoisfG2zcuF9+zw4g==", + "dependencies": { + "node-html-parser": "^6.1.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/node-html-parser": { + "version": "6.1.12", + "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.12.tgz", + "integrity": "sha512-/bT/Ncmv+fbMGX96XG9g05vFt43m/+SYKIs9oAemQVYyVcZmDAI2Xq/SbNcpOA35eF0Zk2av3Ksf+Xk8Vt8abA==", + "dependencies": { + "css-select": "^5.1.0", + "he": "1.2.0" + } + }, "node_modules/node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -4580,6 +4702,17 @@ "node": ">=8" } }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + 
"dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", diff --git a/package.json b/package.json index 3a9330ba..d02075f2 100644 --- a/package.json +++ b/package.json @@ -31,25 +31,26 @@ }, "homepage": "https://github.com/IQSS/dataverse-client-javascript#readme", "devDependencies": { + "@types/chai": "^4.3.4", "@types/jest": "^29.4.0", "@types/sinon": "^10.0.13", - "@types/chai": "^4.3.4", "@typescript-eslint/eslint-plugin": "^5.54.0", "@typescript-eslint/parser": "^5.52.0", + "chai": "^4.3.7", "eslint": "^8.35.0", "eslint-plugin-jest": "^27.2.1", "jest": "^29.4.3", - "chai": "^4.3.7", "prettier": "^2.8.4", - "ts-jest": "^29.0.5", "sinon": "^15.0.3", - "testcontainers": "^9.9.1" + "testcontainers": "^9.9.1", + "ts-jest": "^29.0.5" }, "dependencies": { "@types/node": "^18.15.11", "@types/turndown": "^5.0.1", "axios": "^1.3.4", - "typescript": "^4.9.5", - "turndown": "^7.1.2" + "node-html-markdown": "^1.3.0", + "turndown": "^7.1.2", + "typescript": "^4.9.5" } } diff --git a/src/core/domain/useCases/validators/NewResourceValidator.ts b/src/core/domain/useCases/validators/NewResourceValidator.ts new file mode 100644 index 00000000..2abb70ae --- /dev/null +++ b/src/core/domain/useCases/validators/NewResourceValidator.ts @@ -0,0 +1,6 @@ +import { ResourceValidationError } from './errors/ResourceValidationError'; + +export interface NewResourceValidator { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + validate(...args: any[]): Promise; +} diff --git a/src/core/domain/useCases/validators/errors/ResourceValidationError.ts b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts new file mode 100644 index 00000000..4d4f7cbb --- /dev/null +++ b/src/core/domain/useCases/validators/errors/ResourceValidationError.ts @@ -0,0 +1,5 @@ +export class ResourceValidationError 
extends Error { + constructor(message: string) { + super(message); + } +} diff --git a/src/core/infra/repositories/ApiRepository.ts b/src/core/infra/repositories/ApiRepository.ts index a58fe658..f0eafbb9 100644 --- a/src/core/infra/repositories/ApiRepository.ts +++ b/src/core/infra/repositories/ApiRepository.ts @@ -56,7 +56,9 @@ export abstract class ApiRepository { requestConfig.withCredentials = true; break; case DataverseApiAuthMechanism.API_KEY: - requestConfig.headers['X-Dataverse-Key'] = ApiConfig.dataverseApiKey; + if (typeof ApiConfig.dataverseApiKey !== 'undefined') { + requestConfig.headers['X-Dataverse-Key'] = ApiConfig.dataverseApiKey; + } break; } return requestConfig; diff --git a/src/datasets/domain/dtos/NewDatasetDTO.ts b/src/datasets/domain/dtos/NewDatasetDTO.ts new file mode 100644 index 00000000..5d6cdaff --- /dev/null +++ b/src/datasets/domain/dtos/NewDatasetDTO.ts @@ -0,0 +1,21 @@ +import { DatasetLicense } from '../models/Dataset'; + +export interface NewDatasetDTO { + license?: DatasetLicense; + metadataBlockValues: NewDatasetMetadataBlockValuesDTO[]; +} + +export interface NewDatasetMetadataBlockValuesDTO { + name: string; + fields: NewDatasetMetadataFieldsDTO; +} + +export type NewDatasetMetadataFieldsDTO = Record; + +export type NewDatasetMetadataFieldValueDTO = + | string + | string[] + | NewDatasetMetadataChildFieldValueDTO + | NewDatasetMetadataChildFieldValueDTO[]; + +export type NewDatasetMetadataChildFieldValueDTO = Record; diff --git a/src/datasets/domain/models/CreatedDatasetIdentifiers.ts b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts new file mode 100644 index 00000000..c4dc825a --- /dev/null +++ b/src/datasets/domain/models/CreatedDatasetIdentifiers.ts @@ -0,0 +1,4 @@ +export interface CreatedDatasetIdentifiers { + persistentId: string; + numericId: number; +} diff --git a/src/datasets/domain/repositories/IDatasetsRepository.ts b/src/datasets/domain/repositories/IDatasetsRepository.ts index 1aca4613..4aed91b3 100644 
--- a/src/datasets/domain/repositories/IDatasetsRepository.ts +++ b/src/datasets/domain/repositories/IDatasetsRepository.ts @@ -2,6 +2,9 @@ import { Dataset } from '../models/Dataset'; import { DatasetUserPermissions } from '../models/DatasetUserPermissions'; import { DatasetLock } from '../models/DatasetLock'; import { DatasetPreviewSubset } from '../models/DatasetPreviewSubset'; +import { NewDatasetDTO } from '../dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; export interface IDatasetsRepository { getDatasetSummaryFieldNames(): Promise; @@ -12,4 +15,9 @@ export interface IDatasetsRepository { getDatasetUserPermissions(datasetId: number | string): Promise; getDatasetLocks(datasetId: number | string): Promise; getAllDatasetPreviews(limit?: number, offset?: number, collectionId?: string): Promise; + createDataset( + newDataset: NewDatasetDTO, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise; } diff --git a/src/datasets/domain/useCases/CreateDataset.ts b/src/datasets/domain/useCases/CreateDataset.ts new file mode 100644 index 00000000..4b71167f --- /dev/null +++ b/src/datasets/domain/useCases/CreateDataset.ts @@ -0,0 +1,49 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase'; +import { IDatasetsRepository } from '../repositories/IDatasetsRepository'; +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../dtos/NewDatasetDTO'; +import { NewResourceValidator } from '../../../core/domain/useCases/validators/NewResourceValidator'; +import { IMetadataBlocksRepository } from '../../../metadataBlocks/domain/repositories/IMetadataBlocksRepository'; +import { MetadataBlock } from '../../../metadataBlocks'; +import { CreatedDatasetIdentifiers } from '../models/CreatedDatasetIdentifiers'; + +export class CreateDataset implements UseCase { + private datasetsRepository: IDatasetsRepository; + private 
metadataBlocksRepository: IMetadataBlocksRepository; + private newDatasetValidator: NewResourceValidator; + + constructor( + datasetsRepository: IDatasetsRepository, + metadataBlocksRepository: IMetadataBlocksRepository, + newDatasetValidator: NewResourceValidator, + ) { + this.datasetsRepository = datasetsRepository; + this.metadataBlocksRepository = metadataBlocksRepository; + this.newDatasetValidator = newDatasetValidator; + } + + /** + * Creates a new Dataset in a collection, given a NewDatasetDTO object and an optional collection identifier, which defaults to root. + * + * @param {NewDatasetDTO} [newDataset] - NewDatasetDTO object including the new dataset metadata field values for each metadata block. + * @param {string} [collectionId] - Specifies the collection identifier where the new dataset should be created (optional, defaults to root). + * @returns {Promise} + * @throws {ResourceValidationError} - If there are validation errors related to the provided information. + * @throws {ReadError} - If there are errors while reading data. + * @throws {WriteError} - If there are errors while writing data. 
+ */ + async execute(newDataset: NewDatasetDTO, collectionId: string = 'root'): Promise { + const metadataBlocks = await this.getNewDatasetMetadataBlocks(newDataset); + this.newDatasetValidator.validate(newDataset, metadataBlocks); + return this.datasetsRepository.createDataset(newDataset, metadataBlocks, collectionId); + } + + async getNewDatasetMetadataBlocks(newDataset: NewDatasetDTO): Promise { + let metadataBlocks: MetadataBlock[] = []; + await Promise.all( + newDataset.metadataBlockValues.map(async (metadataBlockValue: NewDatasetMetadataBlockValuesDTO) => { + metadataBlocks.push(await this.metadataBlocksRepository.getMetadataBlockByName(metadataBlockValue.name)); + }), + ); + return metadataBlocks; + } +} diff --git a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts index 1cb64e7d..5522553e 100644 --- a/src/datasets/domain/useCases/GetAllDatasetPreviews.ts +++ b/src/datasets/domain/useCases/GetAllDatasetPreviews.ts @@ -9,6 +9,14 @@ export class GetAllDatasetPreviews implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns an instance of DatasetPreviewSubset that contains reduced information for each dataset that the calling user can access in the installation. + * + * @param {number} [limit] - Limit for pagination (optional). + * @param {number} [offset] - Offset for pagination (optional). + * @param {string} [collectionId] - Collection id (optional). 
+ * @returns {Promise} + */ async execute(limit?: number, offset?: number, collectionId?: string): Promise { return await this.datasetsRepository.getAllDatasetPreviews(limit, offset, collectionId); } diff --git a/src/datasets/domain/useCases/GetDataset.ts b/src/datasets/domain/useCases/GetDataset.ts index 873dd27c..ca3075a6 100644 --- a/src/datasets/domain/useCases/GetDataset.ts +++ b/src/datasets/domain/useCases/GetDataset.ts @@ -10,6 +10,14 @@ export class GetDataset implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns a Dataset instance, given the search parameters to identify it. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false + * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/datasets/domain/useCases/GetDatasetCitation.ts b/src/datasets/domain/useCases/GetDatasetCitation.ts index f9751387..11dbb089 100644 --- a/src/datasets/domain/useCases/GetDatasetCitation.ts +++ b/src/datasets/domain/useCases/GetDatasetCitation.ts @@ -9,6 +9,14 @@ export class GetDatasetCitation implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the Dataset citation text. + * + * @param {number} [datasetId] - The dataset identifier. 
+ * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false + * @returns {Promise} + */ async execute( datasetId: number, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/datasets/domain/useCases/GetDatasetLocks.ts b/src/datasets/domain/useCases/GetDatasetLocks.ts index f44dccda..b4164567 100644 --- a/src/datasets/domain/useCases/GetDatasetLocks.ts +++ b/src/datasets/domain/useCases/GetDatasetLocks.ts @@ -9,6 +9,12 @@ export class GetDatasetLocks implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns all locks present in a Dataset. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @returns {Promise} + */ async execute(datasetId: number | string): Promise { return await this.datasetsRepository.getDatasetLocks(datasetId); } diff --git a/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts b/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts index 4041e408..4b67afc3 100644 --- a/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts +++ b/src/datasets/domain/useCases/GetDatasetSummaryFieldNames.ts @@ -8,6 +8,11 @@ export class GetDatasetSummaryFieldNames implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the names of the dataset summary fields configured in the installation. 
+ * + * @returns {Promise} + */ async execute(): Promise { return await this.datasetsRepository.getDatasetSummaryFieldNames(); } diff --git a/src/datasets/domain/useCases/GetDatasetUserPermissions.ts b/src/datasets/domain/useCases/GetDatasetUserPermissions.ts index 594e0ba6..13055977 100644 --- a/src/datasets/domain/useCases/GetDatasetUserPermissions.ts +++ b/src/datasets/domain/useCases/GetDatasetUserPermissions.ts @@ -9,6 +9,12 @@ export class GetDatasetUserPermissions implements UseCase} + */ async execute(datasetId: number | string): Promise { return await this.datasetsRepository.getDatasetUserPermissions(datasetId); } diff --git a/src/datasets/domain/useCases/GetPrivateUrlDataset.ts b/src/datasets/domain/useCases/GetPrivateUrlDataset.ts index 3580632c..2e05906d 100644 --- a/src/datasets/domain/useCases/GetPrivateUrlDataset.ts +++ b/src/datasets/domain/useCases/GetPrivateUrlDataset.ts @@ -9,6 +9,12 @@ export class GetPrivateUrlDataset implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns a Dataset instance, given an associated Private URL Token. + * + * @param {string} [token] - A Private URL token. + * @returns {Promise} + */ async execute(token: string): Promise { return await this.datasetsRepository.getPrivateUrlDataset(token); } diff --git a/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts b/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts index 7ff06f47..508376e8 100644 --- a/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts +++ b/src/datasets/domain/useCases/GetPrivateUrlDatasetCitation.ts @@ -8,6 +8,12 @@ export class GetPrivateUrlDatasetCitation implements UseCase { this.datasetsRepository = datasetsRepository; } + /** + * Returns the Dataset citation text, given an associated Private URL Token. + * + * @param {string} [token] - A Private URL token. 
+ * @returns {Promise} + */ async execute(token: string): Promise { return await this.datasetsRepository.getPrivateUrlDatasetCitation(token); } diff --git a/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts new file mode 100644 index 00000000..717f2b4c --- /dev/null +++ b/src/datasets/domain/useCases/validators/BaseMetadataFieldValidator.ts @@ -0,0 +1,29 @@ +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { FieldValidationError } from './errors/FieldValidationError'; +import { MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetMetadataFieldAndValueInfo { + metadataFieldInfo: MetadataFieldInfo; + metadataFieldKey: string; + metadataFieldValue: NewDatasetMetadataFieldValueDTO; + metadataBlockName: string; + metadataParentFieldKey?: string; + metadataFieldPosition?: number; +} + +export abstract class BaseMetadataFieldValidator { + abstract validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void; + + protected createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + reason: string, + ): FieldValidationError { + return new FieldValidationError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + reason, + ); + } +} diff --git a/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts new file mode 100644 index 00000000..35111901 --- /dev/null +++ b/src/datasets/domain/useCases/validators/MetadataFieldValidator.ts @@ -0,0 +1,51 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { 
MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; +import { EmptyFieldError } from './errors/EmptyFieldError'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; + +export class MetadataFieldValidator extends BaseMetadataFieldValidator { + constructor( + private singleMetadataFieldValidator: SingleMetadataFieldValidator, + private multipleMetadataFieldValidator: MultipleMetadataFieldValidator, + ) { + super(); + } + + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if ( + metadataFieldValue == undefined || + metadataFieldValue == null || + this.isEmptyString(metadataFieldValue) || + this.isEmptyArray(metadataFieldValue) + ) { + if (metadataFieldInfo.isRequired) { + throw new EmptyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } else { + return; + } + } + if (newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.multiple) { + this.multipleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); + } else { + this.singleMetadataFieldValidator.validate(newDatasetMetadataFieldAndValueInfo); + } + } + + private isEmptyString(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return typeof metadataFieldValue == 'string' && metadataFieldValue.trim() === ''; + } + + private isEmptyArray(metadataFieldValue: NewDatasetMetadataFieldValueDTO): boolean { + return ( + Array.isArray(metadataFieldValue) && (metadataFieldValue as Array).length == 0 + ); + } +} diff --git 
a/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts new file mode 100644 index 00000000..ae0be7fa --- /dev/null +++ b/src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator.ts @@ -0,0 +1,55 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { NewDatasetMetadataFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from './SingleMetadataFieldValidator'; + +export class MultipleMetadataFieldValidator extends BaseMetadataFieldValidator { + constructor(private singleMetadataFieldValidator: SingleMetadataFieldValidator) { + super(); + } + + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (!Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError(newDatasetMetadataFieldAndValueInfo, 'Expecting an array of values.'); + } + if (this.isValidArrayType(metadataFieldValue, 'string') && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of child fields, not strings.', + ); + } else if (this.isValidArrayType(metadataFieldValue, 'object') && metadataFieldInfo.type !== 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting an array of strings, not child fields.', + ); + } else if ( + !this.isValidArrayType(metadataFieldValue, 'object') && + !this.isValidArrayType(metadataFieldValue, 'string') + ) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'The provided array of values is not valid.', + ); + } + + const fieldValues = metadataFieldValue as 
NewDatasetMetadataFieldValueDTO[]; + fieldValues.forEach((value, metadataFieldPosition) => { + this.singleMetadataFieldValidator.validate({ + metadataFieldInfo: metadataFieldInfo, + metadataFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldValue: value, + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: metadataFieldPosition, + }); + }); + } + + private isValidArrayType( + metadataFieldValue: Array, + expectedType: 'string' | 'object', + ): boolean { + return metadataFieldValue.every((item: string | NewDatasetMetadataFieldValueDTO) => typeof item === expectedType); + } +} diff --git a/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts new file mode 100644 index 00000000..bb07efe6 --- /dev/null +++ b/src/datasets/domain/useCases/validators/NewDatasetResourceValidator.ts @@ -0,0 +1,33 @@ +import { NewDatasetDTO, NewDatasetMetadataBlockValuesDTO } from '../../dtos/NewDatasetDTO'; +import { NewResourceValidator } from '../../../../core/domain/useCases/validators/NewResourceValidator'; +import { MetadataBlock } from '../../../../metadataBlocks'; +import { ResourceValidationError } from '../../../../core/domain/useCases/validators/errors/ResourceValidationError'; +import { BaseMetadataFieldValidator } from './BaseMetadataFieldValidator'; + +export class NewDatasetResourceValidator implements NewResourceValidator { + constructor(private metadataFieldValidator: BaseMetadataFieldValidator) {} + + async validate(resource: NewDatasetDTO, metadataBlocks: MetadataBlock[]): Promise { + for (const metadataBlockValues of resource.metadataBlockValues) { + await this.validateMetadataBlock(metadataBlockValues, metadataBlocks); + } + } + + private async validateMetadataBlock( + metadataBlockValues: NewDatasetMetadataBlockValuesDTO, + 
metadataBlocks: MetadataBlock[], + ) { + const metadataBlockName = metadataBlockValues.name; + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name === metadataBlockName, + ); + for (const metadataFieldKey of Object.keys(metadataBlock.metadataFields)) { + this.metadataFieldValidator.validate({ + metadataFieldInfo: metadataBlock.metadataFields[metadataFieldKey], + metadataFieldKey: metadataFieldKey, + metadataFieldValue: metadataBlockValues.fields[metadataFieldKey], + metadataBlockName: metadataBlockName, + }); + } + } +} diff --git a/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts new file mode 100644 index 00000000..e338457c --- /dev/null +++ b/src/datasets/domain/useCases/validators/SingleMetadataFieldValidator.ts @@ -0,0 +1,95 @@ +import { BaseMetadataFieldValidator, NewDatasetMetadataFieldAndValueInfo } from './BaseMetadataFieldValidator'; +import { ControlledVocabularyFieldError } from './errors/ControlledVocabularyFieldError'; +import { DateFormatFieldError } from './errors/DateFormatFieldError'; +import { MetadataFieldValidator } from './MetadataFieldValidator'; +import { NewDatasetMetadataChildFieldValueDTO } from '../../dtos/NewDatasetDTO'; +import { MultipleMetadataFieldValidator } from './MultipleMetadataFieldValidator'; + +export class SingleMetadataFieldValidator extends BaseMetadataFieldValidator { + validate(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo): void { + const metadataFieldValue = newDatasetMetadataFieldAndValueInfo.metadataFieldValue; + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (Array.isArray(metadataFieldValue)) { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a single field, not an array.', + ); + } + if (typeof metadataFieldValue === 'object' && metadataFieldInfo.type !== 'NONE') 
{ + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting a string, not child fields.', + ); + } + if (typeof metadataFieldValue === 'string' && metadataFieldInfo.type === 'NONE') { + throw this.createGeneralValidationError( + newDatasetMetadataFieldAndValueInfo, + 'Expecting child fields, not a string.', + ); + } + this.validateFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + private validateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + if (metadataFieldInfo.isControlledVocabulary) { + this.validateControlledVocabularyFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.type == 'DATE') { + this.validateDateFieldValue(newDatasetMetadataFieldAndValueInfo); + } + + if (metadataFieldInfo.childMetadataFields != undefined) { + this.validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo); + } + } + + private validateControlledVocabularyFieldValue( + newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo, + ) { + if ( + !newDatasetMetadataFieldAndValueInfo.metadataFieldInfo.controlledVocabularyValues.includes( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string, + ) + ) { + throw new ControlledVocabularyFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateDateFieldValue(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const dateFormatRegex = /^\d{4}-\d{2}-\d{2}$/; + if (!dateFormatRegex.test(newDatasetMetadataFieldAndValueInfo.metadataFieldValue as string)) { + throw new DateFormatFieldError( + newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + 
newDatasetMetadataFieldAndValueInfo.metadataBlockName, + newDatasetMetadataFieldAndValueInfo.metadataParentFieldKey, + newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + ); + } + } + + private validateChildMetadataFieldValues(newDatasetMetadataFieldAndValueInfo: NewDatasetMetadataFieldAndValueInfo) { + const metadataFieldInfo = newDatasetMetadataFieldAndValueInfo.metadataFieldInfo; + const childMetadataFieldKeys = Object.keys(metadataFieldInfo.childMetadataFields); + const metadataFieldValidator = new MetadataFieldValidator(this, new MultipleMetadataFieldValidator(this)); + for (const childMetadataFieldKey of childMetadataFieldKeys) { + const childMetadataFieldInfo = metadataFieldInfo.childMetadataFields[childMetadataFieldKey]; + metadataFieldValidator.validate({ + metadataFieldInfo: childMetadataFieldInfo, + metadataFieldKey: childMetadataFieldKey, + metadataFieldValue: ( + newDatasetMetadataFieldAndValueInfo.metadataFieldValue as NewDatasetMetadataChildFieldValueDTO + )[childMetadataFieldKey], + metadataBlockName: newDatasetMetadataFieldAndValueInfo.metadataBlockName, + metadataParentFieldKey: newDatasetMetadataFieldAndValueInfo.metadataFieldKey, + metadataFieldPosition: newDatasetMetadataFieldAndValueInfo.metadataFieldPosition, + }); + } + } +} diff --git a/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts b/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts new file mode 100644 index 00000000..b628f53f --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/ControlledVocabularyFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class ControlledVocabularyFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 
'The field does not have a valid controlled vocabulary value.', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts b/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts new file mode 100644 index 00000000..a6b36fa5 --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/DateFormatFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class DateFormatFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field requires a valid date format (YYYY-MM-DD).', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts b/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts new file mode 100644 index 00000000..e1ca1d7a --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/EmptyFieldError.ts @@ -0,0 +1,18 @@ +import { FieldValidationError } from './FieldValidationError'; + +export class EmptyFieldError extends FieldValidationError { + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + ) { + super( + metadataFieldName, + citationBlockName, + parentMetadataFieldName, + fieldPosition, + 'The field should not be empty.', + ); + } +} diff --git a/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts new file mode 100644 index 00000000..2c3c6a2e --- /dev/null +++ b/src/datasets/domain/useCases/validators/errors/FieldValidationError.ts @@ -0,0 +1,32 @@ +import { ResourceValidationError } from '../../../../../core/domain/useCases/validators/errors/ResourceValidationError'; + +export class 
FieldValidationError extends ResourceValidationError { + citationBlockName: string; + metadataFieldName: string; + parentMetadataFieldName?: string; + fieldPosition?: number; + + constructor( + metadataFieldName: string, + citationBlockName: string, + parentMetadataFieldName?: string, + fieldPosition?: number, + reason?: string, + ) { + let message = `There was an error when validating the field ${metadataFieldName} from metadata block ${citationBlockName}`; + if (parentMetadataFieldName) { + message += ` with parent field ${parentMetadataFieldName}`; + } + if (fieldPosition) { + message += ` in position ${fieldPosition}`; + } + if (reason) { + message += `. Reason was: ${reason}`; + } + super(message); + this.citationBlockName = citationBlockName; + this.metadataFieldName = metadataFieldName; + this.parentMetadataFieldName = parentMetadataFieldName; + this.fieldPosition = fieldPosition; + } +} diff --git a/src/datasets/index.ts b/src/datasets/index.ts index 07a1574a..b75784b6 100644 --- a/src/datasets/index.ts +++ b/src/datasets/index.ts @@ -7,6 +7,12 @@ import { GetPrivateUrlDatasetCitation } from './domain/useCases/GetPrivateUrlDat import { GetDatasetUserPermissions } from './domain/useCases/GetDatasetUserPermissions'; import { GetDatasetLocks } from './domain/useCases/GetDatasetLocks'; import { GetAllDatasetPreviews } from './domain/useCases/GetAllDatasetPreviews'; +import { NewDatasetResourceValidator } from './domain/useCases/validators/NewDatasetResourceValidator'; +import { MetadataBlocksRepository } from '../metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { CreateDataset } from './domain/useCases/CreateDataset'; +import { MetadataFieldValidator } from './domain/useCases/validators/MetadataFieldValidator'; +import { SingleMetadataFieldValidator } from './domain/useCases/validators/SingleMetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from './domain/useCases/validators/MultipleMetadataFieldValidator'; const 
datasetsRepository = new DatasetsRepository(); @@ -18,6 +24,16 @@ const getPrivateUrlDatasetCitation = new GetPrivateUrlDatasetCitation(datasetsRe const getDatasetUserPermissions = new GetDatasetUserPermissions(datasetsRepository); const getDatasetLocks = new GetDatasetLocks(datasetsRepository); const getAllDatasetPreviews = new GetAllDatasetPreviews(datasetsRepository); +const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); +const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), +); +const createDataset = new CreateDataset( + datasetsRepository, + new MetadataBlocksRepository(), + new NewDatasetResourceValidator(metadataFieldValidator), +); export { getDatasetSummaryFieldNames, @@ -28,6 +44,7 @@ export { getDatasetUserPermissions, getDatasetLocks, getAllDatasetPreviews, + createDataset, }; export { DatasetNotNumberedVersion } from './domain/models/DatasetNotNumberedVersion'; export { DatasetUserPermissions } from './domain/models/DatasetUserPermissions'; @@ -45,3 +62,11 @@ export { } from './domain/models/Dataset'; export { DatasetPreview } from './domain/models/DatasetPreview'; export { DatasetPreviewSubset } from './domain/models/DatasetPreviewSubset'; +export { + NewDatasetDTO as NewDataset, + NewDatasetMetadataBlockValuesDTO as NewDatasetMetadataBlockValues, + NewDatasetMetadataFieldsDTO as NewDatasetMetadataFields, + NewDatasetMetadataFieldValueDTO as NewDatasetMetadataFieldValue, + NewDatasetMetadataChildFieldValueDTO as NewDatasetMetadataChildFieldValue, +} from './domain/dtos/NewDatasetDTO'; +export { CreatedDatasetIdentifiers } from './domain/models/CreatedDatasetIdentifiers'; diff --git a/src/datasets/infra/repositories/DatasetsRepository.ts b/src/datasets/infra/repositories/DatasetsRepository.ts index 66e638e0..c0098c04 100644 --- a/src/datasets/infra/repositories/DatasetsRepository.ts +++ 
b/src/datasets/infra/repositories/DatasetsRepository.ts @@ -8,6 +8,10 @@ import { DatasetLock } from '../../domain/models/DatasetLock'; import { transformDatasetLocksResponseToDatasetLocks } from './transformers/datasetLocksTransformers'; import { transformDatasetPreviewsResponseToDatasetPreviewSubset } from './transformers/datasetPreviewsTransformers'; import { DatasetPreviewSubset } from '../../domain/models/DatasetPreviewSubset'; +import { NewDatasetDTO } from '../../domain/dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../metadataBlocks'; +import { transformNewDatasetModelToRequestPayload } from './transformers/newDatasetTransformers'; +import { CreatedDatasetIdentifiers } from '../../domain/models/CreatedDatasetIdentifiers'; export interface GetAllDatasetPreviewsQueryParams { per_page?: number; @@ -44,7 +48,7 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi true, { includeDeaccessioned: includeDeaccessioned, - includeFiles: false, + excludeFiles: true, }, ) .then((response) => transformVersionResponseToDataset(response)) @@ -114,4 +118,25 @@ export class DatasetsRepository extends ApiRepository implements IDatasetsReposi throw error; }); } + + public async createDataset( + newDataset: NewDatasetDTO, + datasetMetadataBlocks: MetadataBlock[], + collectionId: string, + ): Promise { + return this.doPost( + `/dataverses/${collectionId}/datasets`, + transformNewDatasetModelToRequestPayload(newDataset, datasetMetadataBlocks), + ) + .then((response) => { + const responseData = response.data.data; + return { + persistentId: responseData.persistentId, + numericId: responseData.id, + }; + }) + .catch((error) => { + throw error; + }); + } } diff --git a/src/datasets/infra/repositories/transformers/datasetTransformers.ts b/src/datasets/infra/repositories/transformers/datasetTransformers.ts index 758e832d..b7e614bd 100644 --- a/src/datasets/infra/repositories/transformers/datasetTransformers.ts +++ 
b/src/datasets/infra/repositories/transformers/datasetTransformers.ts @@ -8,9 +8,7 @@ import { DatasetMetadataBlocks, } from '../../../domain/models/Dataset'; import { AxiosResponse } from 'axios'; -import TurndownService from 'turndown'; - -const turndownService = new TurndownService(); +import { NodeHtmlMarkdown } from 'node-html-markdown'; export const transformVersionResponseToDataset = (response: AxiosResponse): Dataset => { const versionPayload = response.data.data; @@ -104,5 +102,5 @@ const transformPayloadToDatasetMetadataFieldValue = (metadataFieldValuePayload: }; const transformHtmlToMarkdown = (source: string): string => { - return turndownService.turndown(source); + return NodeHtmlMarkdown.translate(source); }; diff --git a/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts new file mode 100644 index 00000000..5ae104b5 --- /dev/null +++ b/src/datasets/infra/repositories/transformers/newDatasetTransformers.ts @@ -0,0 +1,140 @@ +import { + NewDatasetDTO, + NewDatasetMetadataBlockValuesDTO, + NewDatasetMetadataFieldsDTO, + NewDatasetMetadataFieldValueDTO, + NewDatasetMetadataChildFieldValueDTO, +} from '../../../domain/dtos/NewDatasetDTO'; +import { DatasetLicense } from '../../../domain/models/Dataset'; +import { MetadataBlock, MetadataFieldInfo } from '../../../../metadataBlocks'; + +export interface NewDatasetRequestPayload { + datasetVersion: { + license?: DatasetLicense; + metadataBlocks: Record; + }; +} + +export interface MetadataBlockRequestPayload { + fields: MetadataFieldRequestPayload[]; + displayName: string; +} + +export interface MetadataFieldRequestPayload { + value: MetadataFieldValueRequestPayload; + typeClass: string; + multiple: boolean; + typeName: string; +} + +export type MetadataFieldValueRequestPayload = + | string + | string[] + | Record + | Record[]; + +export const transformNewDatasetModelToRequestPayload = ( + newDataset: NewDatasetDTO, 
+ metadataBlocks: MetadataBlock[], +): NewDatasetRequestPayload => { + return { + datasetVersion: { + ...(newDataset.license && { license: newDataset.license }), + metadataBlocks: transformMetadataBlockModelsToRequestPayload(newDataset.metadataBlockValues, metadataBlocks), + }, + }; +}; + +export const transformMetadataBlockModelsToRequestPayload = ( + newDatasetMetadataBlocksValues: NewDatasetMetadataBlockValuesDTO[], + metadataBlocks: MetadataBlock[], +): Record => { + let metadataBlocksRequestPayload: Record = {}; + newDatasetMetadataBlocksValues.forEach(function (newDatasetMetadataBlockValues: NewDatasetMetadataBlockValuesDTO) { + const metadataBlock: MetadataBlock = metadataBlocks.find( + (metadataBlock) => metadataBlock.name == newDatasetMetadataBlockValues.name, + ); + metadataBlocksRequestPayload[newDatasetMetadataBlockValues.name] = { + fields: transformMetadataFieldModelsToRequestPayload( + newDatasetMetadataBlockValues.fields, + metadataBlock.metadataFields, + ), + displayName: metadataBlock.displayName, + }; + }); + return metadataBlocksRequestPayload; +}; + +export const transformMetadataFieldModelsToRequestPayload = ( + newDatasetMetadataFields: NewDatasetMetadataFieldsDTO, + metadataBlockFields: Record, +): MetadataFieldRequestPayload[] => { + let metadataFieldsRequestPayload: MetadataFieldRequestPayload[] = []; + for (const metadataFieldKey of Object.keys(newDatasetMetadataFields)) { + const newDatasetMetadataChildFieldValue: NewDatasetMetadataFieldValueDTO = + newDatasetMetadataFields[metadataFieldKey]; + metadataFieldsRequestPayload.push({ + value: transformMetadataFieldValueToRequestPayload( + newDatasetMetadataChildFieldValue, + metadataBlockFields[metadataFieldKey], + ), + typeClass: metadataBlockFields[metadataFieldKey].typeClass, + multiple: metadataBlockFields[metadataFieldKey].multiple, + typeName: metadataFieldKey, + }); + } + return metadataFieldsRequestPayload; +}; + +export const transformMetadataFieldValueToRequestPayload = ( + 
newDatasetMetadataFieldValue: NewDatasetMetadataFieldValueDTO, + metadataBlockFieldInfo: MetadataFieldInfo, +): MetadataFieldValueRequestPayload => { + let value: MetadataFieldValueRequestPayload; + if (metadataBlockFieldInfo.multiple) { + const newDatasetMetadataChildFieldValues = newDatasetMetadataFieldValue as + | string[] + | NewDatasetMetadataChildFieldValueDTO[]; + if (typeof newDatasetMetadataChildFieldValues[0] == 'string') { + value = newDatasetMetadataFieldValue as string[]; + } else { + value = []; + (newDatasetMetadataChildFieldValues as NewDatasetMetadataChildFieldValueDTO[]).forEach(function ( + childMetadataFieldValue: NewDatasetMetadataChildFieldValueDTO, + ) { + (value as Record[]).push( + transformMetadataChildFieldValueToRequestPayload(childMetadataFieldValue, metadataBlockFieldInfo), + ); + }); + } + } else { + if (typeof newDatasetMetadataFieldValue == 'string') { + value = newDatasetMetadataFieldValue; + } else { + value = transformMetadataChildFieldValueToRequestPayload( + newDatasetMetadataFieldValue as NewDatasetMetadataChildFieldValueDTO, + metadataBlockFieldInfo, + ); + } + } + return value; +}; + +export const transformMetadataChildFieldValueToRequestPayload = ( + newDatasetMetadataChildFieldValue: NewDatasetMetadataChildFieldValueDTO, + metadataBlockFieldInfo: MetadataFieldInfo, +): Record => { + let metadataChildFieldRequestPayload: Record = {}; + for (const metadataChildFieldKey of Object.keys(newDatasetMetadataChildFieldValue)) { + const childMetadataFieldInfo: MetadataFieldInfo = metadataBlockFieldInfo.childMetadataFields[metadataChildFieldKey]; + const value: string = newDatasetMetadataChildFieldValue[metadataChildFieldKey] as unknown as string; + metadataChildFieldRequestPayload[metadataChildFieldKey] = { + value: value, + typeClass: childMetadataFieldInfo.typeClass, + multiple: childMetadataFieldInfo.multiple, + typeName: metadataChildFieldKey, + }; + } + + return metadataChildFieldRequestPayload; +}; diff --git 
a/src/files/domain/models/FileCriteria.ts b/src/files/domain/models/FileCriteria.ts index 3285624a..5ad26aa1 100644 --- a/src/files/domain/models/FileCriteria.ts +++ b/src/files/domain/models/FileCriteria.ts @@ -70,6 +70,6 @@ export enum FileOrderCriteria { export enum FileAccessStatus { PUBLIC = 'Public', RESTRICTED = 'Restricted', - EMBARGOED = 'EmbargoedThenRestricted', - EMBARGOED_RESTRICTED = 'EmbargoedThenPublic', + EMBARGOED = 'EmbargoedThenPublic', + EMBARGOED_RESTRICTED = 'EmbargoedThenRestricted', } diff --git a/src/files/domain/repositories/IFilesRepository.ts b/src/files/domain/repositories/IFilesRepository.ts index fc9b6dda..e3f16127 100644 --- a/src/files/domain/repositories/IFilesRepository.ts +++ b/src/files/domain/repositories/IFilesRepository.ts @@ -39,4 +39,6 @@ export interface IFilesRepository { getFileDataTables(fileId: number | string): Promise; getFile(fileId: number | string, datasetVersionId: string): Promise; + + getFileCitation(fileId: number | string, datasetVersionId: string, includeDeaccessioned: boolean): Promise; } diff --git a/src/files/domain/useCases/GetDatasetFileCounts.ts b/src/files/domain/useCases/GetDatasetFileCounts.ts index 2154100e..f164dcf9 100644 --- a/src/files/domain/useCases/GetDatasetFileCounts.ts +++ b/src/files/domain/useCases/GetDatasetFileCounts.ts @@ -11,6 +11,15 @@ export class GetDatasetFileCounts implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns an instance of FileCounts, containing the requested Dataset total file count, as well as file counts for different file properties. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. 
If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files by different file properties (optional). + * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/files/domain/useCases/GetDatasetFiles.ts b/src/files/domain/useCases/GetDatasetFiles.ts index 605ab542..642d7c18 100644 --- a/src/files/domain/useCases/GetDatasetFiles.ts +++ b/src/files/domain/useCases/GetDatasetFiles.ts @@ -11,6 +11,18 @@ export class GetDatasetFiles implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns an instance of FilesSubset, which contains the files from the requested Dataset and page (if pagination parameters are set). + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false. + * @param {number} [limit] - Limit for pagination (optional). + * @param {number} [offset] - Offset for pagination (optional). + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files by different file properties (optional). 
+ * @param {FileOrderCriteria} [fileOrderCriteria=FileOrderCriteria.NAME_AZ] - Supports ordering the results according to different criteria. If not set, the default value is FileOrderCriteria.NAME_AZ. + * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts b/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts index b582b551..53fdb25e 100644 --- a/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts +++ b/src/files/domain/useCases/GetDatasetFilesTotalDownloadSize.ts @@ -11,6 +11,16 @@ export class GetDatasetFilesTotalDownloadSize implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns the combined size in bytes of all the files available for download from a particular Dataset. + * + * @param {number | string} [datasetId] - The dataset identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {FileDownloadSizeMode} [fileDownloadSizeMode=FileDownloadSizeMode.ALL] - Applies a filter mode to the operation to consider only archival sizes, original or both (all). The default value is FileDownloadSizeMode.ALL. + * @param {FileSearchCriteria} [fileSearchCriteria] - Supports filtering the files to obtain their combined size by different file properties (optional). + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false.
+ * @returns {Promise} + */ async execute( datasetId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/files/domain/useCases/GetFile.ts b/src/files/domain/useCases/GetFile.ts index 64f30052..6eb65991 100644 --- a/src/files/domain/useCases/GetFile.ts +++ b/src/files/domain/useCases/GetFile.ts @@ -5,6 +5,13 @@ import { DatasetNotNumberedVersion } from '../../../datasets'; export class GetFile { constructor(private readonly filesRepository: IFilesRepository) {} + /** + * Returns a File instance, given the search parameters to identify it. + * + * @param {number | string} [fileId] - The File identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @returns {Promise} + */ async execute( fileId: number | string, datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, diff --git a/src/files/domain/useCases/GetFileCitation.ts b/src/files/domain/useCases/GetFileCitation.ts new file mode 100644 index 00000000..196968f0 --- /dev/null +++ b/src/files/domain/useCases/GetFileCitation.ts @@ -0,0 +1,27 @@ +import { UseCase } from '../../../core/domain/useCases/UseCase'; +import { IFilesRepository } from '../repositories/IFilesRepository'; +import { DatasetNotNumberedVersion } from '../../../datasets'; + +export class GetFileCitation implements UseCase { + private filesRepository: IFilesRepository; + + constructor(filesRepository: IFilesRepository) { + this.filesRepository = filesRepository; + } + + /** + * Returns the File citation text. 
+ * + * @param {number | string} [fileId] - The File identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @param {string | DatasetNotNumberedVersion} [datasetVersionId=DatasetNotNumberedVersion.LATEST] - The dataset version identifier, which can be a version-specific numeric string (for example, 1.0) or a DatasetNotNumberedVersion enum value. If this parameter is not set, the default value is: DatasetNotNumberedVersion.LATEST + * @param {boolean} [includeDeaccessioned=false] - Indicates whether to consider deaccessioned versions in the dataset search or not. The default value is false + * @returns {Promise} + */ + async execute( + fileId: number, + datasetVersionId: string | DatasetNotNumberedVersion = DatasetNotNumberedVersion.LATEST, + includeDeaccessioned = false, + ): Promise { + return await this.filesRepository.getFileCitation(fileId, datasetVersionId, includeDeaccessioned); + } +} diff --git a/src/files/domain/useCases/GetFileDataTables.ts b/src/files/domain/useCases/GetFileDataTables.ts index d8a65891..07ffdeda 100644 --- a/src/files/domain/useCases/GetFileDataTables.ts +++ b/src/files/domain/useCases/GetFileDataTables.ts @@ -9,6 +9,12 @@ export class GetFileDataTables implements UseCase { this.filesRepository = filesRepository; } + /** + * This use case is oriented toward tabular files and provides an array of FileDataTable objects for an existing tabular file. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). 
+ * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileDataTables(fileId); } diff --git a/src/files/domain/useCases/GetFileDownloadCount.ts b/src/files/domain/useCases/GetFileDownloadCount.ts index 4d2b41eb..e527e1ef 100644 --- a/src/files/domain/useCases/GetFileDownloadCount.ts +++ b/src/files/domain/useCases/GetFileDownloadCount.ts @@ -8,6 +8,12 @@ export class GetFileDownloadCount implements UseCase { this.filesRepository = filesRepository; } + /** + * Provides the download count for a particular File. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). + * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileDownloadCount(fileId); } diff --git a/src/files/domain/useCases/GetFileUserPermissions.ts b/src/files/domain/useCases/GetFileUserPermissions.ts index 454984ef..ed2ee266 100644 --- a/src/files/domain/useCases/GetFileUserPermissions.ts +++ b/src/files/domain/useCases/GetFileUserPermissions.ts @@ -9,6 +9,12 @@ export class GetFileUserPermissions implements UseCase { this.filesRepository = filesRepository; } + /** + * Returns a FileUserPermissions object, which includes the permissions that the calling user has on a particular File. + * + * @param {number | string} [fileId] - The file identifier, which can be a string (for persistent identifiers), or a number (for numeric identifiers). 
+ * @returns {Promise} + */ async execute(fileId: number | string): Promise { return await this.filesRepository.getFileUserPermissions(fileId); } diff --git a/src/files/index.ts b/src/files/index.ts index 45b8f270..c50c3785 100644 --- a/src/files/index.ts +++ b/src/files/index.ts @@ -6,6 +6,7 @@ import { GetFileUserPermissions } from './domain/useCases/GetFileUserPermissions import { GetFileDataTables } from './domain/useCases/GetFileDataTables'; import { GetDatasetFilesTotalDownloadSize } from './domain/useCases/GetDatasetFilesTotalDownloadSize'; import { GetFile } from './domain/useCases/GetFile'; +import { GetFileCitation } from './domain/useCases/GetFileCitation'; const filesRepository = new FilesRepository(); @@ -16,6 +17,7 @@ const getFileUserPermissions = new GetFileUserPermissions(filesRepository); const getFileDataTables = new GetFileDataTables(filesRepository); const getDatasetFilesTotalDownloadSize = new GetDatasetFilesTotalDownloadSize(filesRepository); const getFile = new GetFile(filesRepository); +const getFileCitation = new GetFileCitation(filesRepository); export { getDatasetFiles, @@ -25,6 +27,7 @@ export { getDatasetFileCounts, getDatasetFilesTotalDownloadSize, getFile, + getFileCitation, }; export { File, FileEmbargo, FileChecksum } from './domain/models/File'; diff --git a/src/files/infra/repositories/FilesRepository.ts b/src/files/infra/repositories/FilesRepository.ts index dff4031b..25a54296 100644 --- a/src/files/infra/repositories/FilesRepository.ts +++ b/src/files/infra/repositories/FilesRepository.ts @@ -153,6 +153,22 @@ export class FilesRepository extends ApiRepository implements IFilesRepository { }); } + public async getFileCitation( + fileId: number | string, + datasetVersionId: string, + includeDeaccessioned: boolean, + ): Promise { + return this.doGet( + this.buildApiEndpoint(this.filesResourceName, `versions/${datasetVersionId}/citation`, fileId), + true, + { includeDeaccessioned: includeDeaccessioned }, + ) + .then((response) => 
response.data.data.message) + .catch((error) => { + throw error; + }); + } + private getFileEndpoint(fileId: number | string, datasetVersionId: string): string { if (datasetVersionId === DatasetNotNumberedVersion.DRAFT) { return this.buildApiEndpoint(this.filesResourceName, 'draft', fileId); diff --git a/src/info/domain/useCases/GetDataverseVersion.ts b/src/info/domain/useCases/GetDataverseVersion.ts index 4937b9d1..51455549 100644 --- a/src/info/domain/useCases/GetDataverseVersion.ts +++ b/src/info/domain/useCases/GetDataverseVersion.ts @@ -9,6 +9,11 @@ export class GetDataverseVersion implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a DataverseVersion object, which contains version information for the Dataverse backend installation. + * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getDataverseVersion(); } diff --git a/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts b/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts index b10c4d1e..3c3b62be 100644 --- a/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts +++ b/src/info/domain/useCases/GetMaxEmbargoDurationInMonths.ts @@ -8,6 +8,11 @@ export class GetMaxEmbargoDurationInMonths implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a number indicating the configured maximum embargo duration in months. 
+ * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getMaxEmbargoDurationInMonths(); } diff --git a/src/info/domain/useCases/GetZipDownloadLimit.ts b/src/info/domain/useCases/GetZipDownloadLimit.ts index 84e8af4b..aa93bb6b 100644 --- a/src/info/domain/useCases/GetZipDownloadLimit.ts +++ b/src/info/domain/useCases/GetZipDownloadLimit.ts @@ -8,6 +8,11 @@ export class GetZipDownloadLimit implements UseCase { this.dataverseInfoRepository = dataverseInfoRepository; } + /** + * Returns a number indicating the configured ZIP download limit in bytes. + * + * @returns {Promise} + */ async execute(): Promise { return await this.dataverseInfoRepository.getZipDownloadLimit(); } diff --git a/src/metadataBlocks/domain/models/MetadataBlock.ts b/src/metadataBlocks/domain/models/MetadataBlock.ts index b95bf799..834d7908 100644 --- a/src/metadataBlocks/domain/models/MetadataBlock.ts +++ b/src/metadataBlocks/domain/models/MetadataBlock.ts @@ -10,10 +10,14 @@ export interface MetadataFieldInfo { displayName: string; title: string; type: string; + typeClass: string; watermark: string; description: string; multiple: boolean; isControlledVocabulary: boolean; + controlledVocabularyValues?: string[]; displayFormat: string; childMetadataFields?: Record; + isRequired: boolean; + displayOrder: number; } diff --git a/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts b/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts index dc09c1c4..a34953e3 100644 --- a/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts +++ b/src/metadataBlocks/domain/useCases/GetMetadataBlockByName.ts @@ -9,6 +9,12 @@ export class GetMetadataBlockByName implements UseCase { this.metadataBlocksRepository = metadataBlocksRepository; } + /** + * Returns a MetadataBlock instance, given its name. + * + * @param {string} [metadataBlockName] - The requested metadata block name. 
+ * @returns {Promise} + */ async execute(metadataBlockName: string): Promise { return await this.metadataBlocksRepository.getMetadataBlockByName(metadataBlockName); } diff --git a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts index ad47afdc..00d40262 100644 --- a/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts +++ b/src/metadataBlocks/infra/repositories/transformers/metadataBlockTransformers.ts @@ -28,6 +28,9 @@ const transformPayloadMetadataFieldInfo = (metadataFieldInfoPayload: any, isChil multiple: metadataFieldInfoPayload.multiple, isControlledVocabulary: metadataFieldInfoPayload.isControlledVocabulary, displayFormat: metadataFieldInfoPayload.displayFormat, + isRequired: metadataFieldInfoPayload.isRequired, + displayOrder: metadataFieldInfoPayload.displayOrder, + typeClass: metadataFieldInfoPayload.typeClass, }; if (!isChild && metadataFieldInfoPayload.hasOwnProperty('childFields')) { const childMetadataFieldsPayload = metadataFieldInfoPayload.childFields; diff --git a/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts b/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts index 1b9acb85..6724b518 100644 --- a/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts +++ b/src/users/domain/useCases/GetCurrentAuthenticatedUser.ts @@ -9,6 +9,11 @@ export class GetCurrentAuthenticatedUser implements UseCase { this.usersRepository = usersRepository; } + /** + * Returns the current AuthenticatedUser corresponding to the authentication mechanism provided through ApiConfig. 
+ * + * @returns {Promise} + */ async execute(): Promise { return await this.usersRepository.getCurrentAuthenticatedUser(); } diff --git a/test/integration/datasets/DatasetsRepository.test.ts b/test/integration/datasets/DatasetsRepository.test.ts index 7181548a..d4e272d8 100644 --- a/test/integration/datasets/DatasetsRepository.test.ts +++ b/test/integration/datasets/DatasetsRepository.test.ts @@ -12,6 +12,9 @@ import { DatasetNotNumberedVersion, DatasetLockType, DatasetPreviewSubset } from import { fail } from 'assert'; import { ApiConfig } from '../../../src'; import { DataverseApiAuthMechanism } from '../../../src/core/infra/repositories/ApiConfig'; +import { NewDatasetDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { MetadataBlocksRepository } from '../../../src/metadataBlocks/infra/repositories/MetadataBlocksRepository'; +import { Author, DatasetContact, DatasetDescription } from '../../../src/datasets/domain/models/Dataset'; describe('DatasetsRepository', () => { const sut: DatasetsRepository = new DatasetsRepository(); @@ -19,45 +22,50 @@ describe('DatasetsRepository', () => { const latestVersionId = DatasetNotNumberedVersion.LATEST; - beforeAll(async () => { + beforeEach(async () => { + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); + }); + + afterEach(async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); }); describe('getAllDatasetPreviews', () => { const testPageLimit = 1; + const expectedTotalDatasetCount = 3; test('should return all dataset previews when no pagination params are defined', async () => { const actual: DatasetPreviewSubset = await sut.getAllDatasetPreviews(); - assert.match(actual.datasetPreviews.length, 3); + assert.match(actual.datasetPreviews.length, expectedTotalDatasetCount); assert.match(actual.datasetPreviews[0].title, 'Third Dataset'); - assert.match(actual.totalDatasetCount, 3); + 
assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return first dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 0); assert.match(actual.datasetPreviews.length, 1); assert.match(actual.datasetPreviews[0].title, 'Third Dataset'); - assert.match(actual.totalDatasetCount, 3); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return second dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 1); assert.match(actual.datasetPreviews.length, 1); assert.match(actual.datasetPreviews[0].title, 'Second Dataset'); - assert.match(actual.totalDatasetCount, 3); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return third dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 2); assert.match(actual.datasetPreviews.length, 1); assert.match(actual.datasetPreviews[0].title, 'First Dataset'); - assert.match(actual.totalDatasetCount, 3); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return forth dataset preview page', async () => { const actual = await sut.getAllDatasetPreviews(testPageLimit, 3); assert.match(actual.datasetPreviews.length, 0); - assert.match(actual.totalDatasetCount, 3); + assert.match(actual.totalDatasetCount, expectedTotalDatasetCount); }); test('should return datasets in the specified collection', async () => { @@ -84,6 +92,47 @@ describe('DatasetsRepository', () => { expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_1_ID); }); + test('should return dataset when it is deaccessioned and includeDeaccessioned param is set', async () => { + await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); + + await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_2_ID, 10) + .then() + 
.catch(() => { + assert.fail('Error while waiting for no locks'); + }); + + await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID, '1.0') + .then() + .catch((error) => { + console.log(JSON.stringify(error)); + assert.fail('Error while deaccessioning test Dataset'); + }); + + const actual = await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, true); + + expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID); + }); + + test('should return dataset when it is deaccessioned, includeDeaccessioned param is set, and user is unauthenticated', async () => { + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, undefined); + const actual = await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, true); + expect(actual.id).toBe(TestConstants.TEST_CREATED_DATASET_2_ID); + }); + + test('should return error when dataset is deaccessioned and includeDeaccessioned param is not set', async () => { + let error: ReadError = undefined; + await sut.getDataset(TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, false).catch((e) => (error = e)); + + assert.match( + error.message, + `There was an error when reading the resource. 
Reason was: [404] Dataset version ${latestVersionId} of dataset ${TestConstants.TEST_CREATED_DATASET_2_ID} not found`, + ); + }); + test('should return error when dataset does not exist', async () => { let error: ReadError = undefined; await sut.getDataset(nonExistentTestDatasetId, latestVersionId, false).catch((e) => (error = e)); @@ -188,14 +237,18 @@ describe('DatasetsRepository', () => { test('should return list of dataset locks by dataset id for a dataset while publishing', async () => { await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID) .then() - .catch(() => { + .catch((error) => { + console.log(JSON.stringify(error)); assert.fail('Error while publishing test Dataset'); }); const actual = await sut.getDatasetLocks(TestConstants.TEST_CREATED_DATASET_2_ID); assert.match(actual.length, 1); assert.match(actual[0].lockType, DatasetLockType.FINALIZE_PUBLICATION); assert.match(actual[0].userId, 'dataverseAdmin'); - assert.match(actual[0].message, 'Publishing the dataset; Validating Datafiles Asynchronously'); + assert.match( + actual[0].message, + 'Publishing the dataset; Registering PIDs for Datafiles; Validating Datafiles Asynchronously', + ); }); test('should return error when dataset does not exist', async () => { @@ -232,19 +285,6 @@ describe('DatasetsRepository', () => { }); test('should return citation when dataset is deaccessioned', async () => { - await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_2_ID, 10) - .then() - .catch(() => { - assert.fail('Error while waiting for no locks'); - }); - - await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_2_ID, '1.0') - .then() - .catch((error) => { - console.log(JSON.stringify(error)); - assert.fail('Error while deaccessioning test Dataset'); - }); - const actualDatasetCitation = await sut.getDatasetCitation( TestConstants.TEST_CREATED_DATASET_2_ID, latestVersionId, @@ -253,4 +293,77 @@ describe('DatasetsRepository', () => { expect(typeof actualDatasetCitation).toBe('string'); }); 
}); + + describe('createDataset', () => { + test('should create a dataset with the provided dataset citation fields', async () => { + const testTitle = 'Dataset created using the createDataset use case'; + const testAuthorName1 = 'Admin, Dataverse'; + const testAuthorName2 = 'Owner, Dataverse'; + const testAuthorAffiliation1 = 'Dataverse.org'; + const testAuthorAffiliation2 = 'Dataversedemo.org'; + const testContactEmail = 'finch@mailinator.com'; + const testContactName = 'Finch, Fiona'; + const testDescription = 'This is the description of the dataset.'; + const testSubject = ['Medicine, Health and Life Sciences']; + + const testNewDataset: NewDatasetDTO = { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: testTitle, + author: [ + { + authorName: testAuthorName1, + authorAffiliation: testAuthorAffiliation1, + }, + { + authorName: testAuthorName2, + authorAffiliation: testAuthorAffiliation2, + }, + ], + datasetContact: [ + { + datasetContactEmail: testContactEmail, + datasetContactName: testContactName, + }, + ], + dsDescription: [ + { + dsDescriptionValue: testDescription, + }, + ], + subject: testSubject, + }, + }, + ], + }; + + const metadataBlocksRepository = new MetadataBlocksRepository(); + const citationMetadataBlock = await metadataBlocksRepository.getMetadataBlockByName('citation'); + const createdDataset = await sut.createDataset(testNewDataset, [citationMetadataBlock], 'root'); + const actualCreatedDataset = await sut.getDataset(createdDataset.numericId, latestVersionId, false); + + expect(actualCreatedDataset.metadataBlocks[0].fields.title).toBe(testTitle); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorName).toBe(testAuthorName1); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[0] as Author).authorAffiliation).toBe( + testAuthorAffiliation1, + ); + expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorName).toBe(testAuthorName2); + 
expect((actualCreatedDataset.metadataBlocks[0].fields.author[1] as Author).authorAffiliation).toBe( + testAuthorAffiliation2, + ); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactEmail, + ).toBe(testContactEmail); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.datasetContact[0] as DatasetContact).datasetContactName, + ).toBe(testContactName); + expect( + (actualCreatedDataset.metadataBlocks[0].fields.dsDescription[0] as DatasetDescription).dsDescriptionValue, + ).toBe(testDescription); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[0]).toBe(testSubject[0]); + expect(actualCreatedDataset.metadataBlocks[0].fields.subject[1]).toBe(testSubject[1]); + }); + }); }); diff --git a/test/integration/environment/setup.js b/test/integration/environment/setup.js index 198f661d..7e07fc46 100644 --- a/test/integration/environment/setup.js +++ b/test/integration/environment/setup.js @@ -53,35 +53,36 @@ async function setupTestFixtures() { console.log('Creating test datasets...'); await createDatasetViaApi(datasetJson1) .then() - .catch((error) => { + .catch(() => { console.error('Tests setup: Error while creating test Dataset 1'); }); - await createDatasetViaApi(datasetJson2) + await createDatasetViaApi(datasetJson2).catch(() => { + console.error('Tests setup: Error while creating test Dataset 2'); + }); + await createCollectionViaApi(collectionJson) .then() .catch((error) => { - console.error('Tests setup: Error while creating test Dataset 2'); + console.error('Tests setup: Error while creating test Collection 1'); }); - console.log('Creating test collections...'); - await createCollectionViaApi(collectionJson) - .then() - .catch((error) => { - console.error('Tests setup: Error while creating test Collection 1'); - }); await createDatasetViaApi(datasetJson3, collectionJson.alias) - .then() - .catch((error) => { - console.error('Tests setup: Error while creating test Dataset 3'); - }); + 
.then() + .catch((error) => { + console.error('Tests setup: Error while creating test Dataset 3'); + }); console.log('Test datasets created'); await waitForDatasetsIndexingInSolr(); } -async function createCollectionViaApi( collectionJson) { +async function createCollectionViaApi(collectionJson) { return await axios.post(`${TestConstants.TEST_API_URL}/dataverses/root`, collectionJson, buildRequestHeaders()); } async function createDatasetViaApi(datasetJson, collectionId = 'root') { - return await axios.post(`${TestConstants.TEST_API_URL}/dataverses/${collectionId}/datasets`, datasetJson, buildRequestHeaders()); + return await axios.post( + `${TestConstants.TEST_API_URL}/dataverses/${collectionId}/datasets`, + datasetJson, + buildRequestHeaders(), + ); } async function waitForDatasetsIndexingInSolr() { diff --git a/test/integration/files/FilesRepository.test.ts b/test/integration/files/FilesRepository.test.ts index e0d9897c..090b7e27 100644 --- a/test/integration/files/FilesRepository.test.ts +++ b/test/integration/files/FilesRepository.test.ts @@ -3,7 +3,7 @@ import { ApiConfig, DataverseApiAuthMechanism } from '../../../src/core/infra/re import { assert } from 'sinon'; import { expect } from 'chai'; import { TestConstants } from '../../testHelpers/TestConstants'; -import {registerFileViaApi, uploadFileViaApi} from '../../testHelpers/files/filesHelper'; +import { registerFileViaApi, uploadFileViaApi } from '../../testHelpers/files/filesHelper'; import { DatasetsRepository } from '../../../src/datasets/infra/repositories/DatasetsRepository'; import { ReadError } from '../../../src/core/domain/repositories/ReadError'; import { FileSearchCriteria, FileAccessStatus, FileOrderCriteria } from '../../../src/files/domain/models/FileCriteria'; @@ -11,6 +11,11 @@ import { DatasetNotNumberedVersion } from '../../../src/datasets'; import { FileCounts } from '../../../src/files/domain/models/FileCounts'; import { FileDownloadSizeMode } from '../../../src'; import { fail } from 
'assert'; +import { + deaccessionDatasetViaApi, + publishDatasetViaApi, + waitForNoLocks, +} from '../../testHelpers/datasets/datasetHelper'; describe('FilesRepository', () => { const sut: FilesRepository = new FilesRepository(); @@ -32,7 +37,9 @@ describe('FilesRepository', () => { beforeAll(async () => { ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.API_KEY, process.env.TEST_API_KEY); // Uploading test file 1 with some categories - const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { categories: [testCategoryName] }) + const uploadFileResponse = await uploadFileViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, testTextFile1Name, { + categories: [testCategoryName], + }) .then() .catch((e) => { console.log(e); @@ -62,11 +69,11 @@ describe('FilesRepository', () => { // Registering test file 1 await registerFileViaApi(uploadFileResponse.data.data.files[0].dataFile.id); const filesSubset = await sut.getDatasetFiles( - TestConstants.TEST_CREATED_DATASET_1_ID, - latestDatasetVersionId, - false, - FileOrderCriteria.NAME_AZ, - ) + TestConstants.TEST_CREATED_DATASET_1_ID, + latestDatasetVersionId, + false, + FileOrderCriteria.NAME_AZ, + ); testFileId = filesSubset.files[0].id; testFilePersistentId = filesSubset.files[0].persistentId; }); @@ -446,11 +453,11 @@ describe('FilesRepository', () => { describe('getFile', () => { describe('by numeric id', () => { - test('should return file when providing a valid id', async () => { - const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); + test('should return file when providing a valid id', async () => { + const actual = await sut.getFile(testFileId, DatasetNotNumberedVersion.LATEST); - assert.match(actual.name, testTextFile1Name); - }); + assert.match(actual.name, testTextFile1Name); + }); test('should return file draft when providing a valid id and version is draft', async () => { const actual = await sut.getFile(testFileId, 
DatasetNotNumberedVersion.DRAFT); @@ -465,21 +472,21 @@ describe('FilesRepository', () => { await sut.getFile(testFileId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, ); }); - test('should return error when file does not exist', async () => { - let error: ReadError = undefined; + test('should return error when file does not exist', async () => { + let error: ReadError = undefined; - await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); + await sut.getFile(nonExistentFiledId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); - assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, - ); - }); + assert.match( + error.message, + `There was an error when reading the resource. Reason was: [404] File with ID 200 not found.`, + ); + }); }); describe('by persistent id', () => { test('should return file when providing a valid persistent id', async () => { @@ -501,8 +508,8 @@ describe('FilesRepository', () => { await sut.getFile(testFilePersistentId, '1.0').catch((e) => (error = e)); assert.match( - error.message, - `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. Please try using the :latest or :draft version instead.`, + error.message, + `Requesting a file by its dataset version is not yet supported. Requested version: 1.0. 
Please try using the :latest or :draft version instead.`, ); }); @@ -513,10 +520,49 @@ describe('FilesRepository', () => { await sut.getFile(nonExistentFiledPersistentId, DatasetNotNumberedVersion.LATEST).catch((e) => (error = e)); assert.match( - error.message, - `There was an error when reading the resource. Reason was: [400] Error attempting get the requested data file.`, + error.message, + `There was an error when reading the resource. Reason was: [404] Datafile with Persistent ID nonExistentFiledPersistentId not found.`, ); }); }); }); + describe('getFileCitation', () => { + test('should return citation when file exists', async () => { + const actualFileCitation = await sut.getFileCitation(testFileId, DatasetNotNumberedVersion.LATEST, false); + expect(typeof actualFileCitation).to.be.a('string'); + }); + + test('should return citation when dataset is deaccessioned', async () => { + await publishDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID) + .then() + .catch(() => { + assert.fail('Error while publishing test Dataset'); + }); + + await waitForNoLocks(TestConstants.TEST_CREATED_DATASET_1_ID, 10) + .then() + .catch(() => { + assert.fail('Error while waiting for no locks'); + }); + + await deaccessionDatasetViaApi(TestConstants.TEST_CREATED_DATASET_1_ID, '1.0') + .then() + .catch(() => { + assert.fail('Error while deaccessioning test Dataset'); + }); + + const actualFileCitation = await sut.getFileCitation(testFileId, DatasetNotNumberedVersion.LATEST, true); + expect(typeof actualFileCitation).to.be.a('string'); + }); + + test('should return error when file does not exist', async () => { + let error: ReadError = undefined; + await sut.getFileCitation(nonExistentFiledId, DatasetNotNumberedVersion.LATEST, false).catch((e) => (error = e)); + + assert.match( + error.message, + `There was an error when reading the resource. 
Reason was: [404] File with ID ${nonExistentFiledId} not found.`, + ); + }); + }); }); diff --git a/test/testHelpers/TestConstants.ts b/test/testHelpers/TestConstants.ts index 5ef4dc4e..c637ff3b 100644 --- a/test/testHelpers/TestConstants.ts +++ b/test/testHelpers/TestConstants.ts @@ -44,4 +44,5 @@ export class TestConstants { }; static readonly TEST_CREATED_DATASET_1_ID = 2; static readonly TEST_CREATED_DATASET_2_ID = 3; + static readonly TEST_CREATED_DATASET_3_ID = 4; } diff --git a/test/testHelpers/datasets/datasetHelper.ts b/test/testHelpers/datasets/datasetHelper.ts index b257a0b8..8415fce6 100644 --- a/test/testHelpers/datasets/datasetHelper.ts +++ b/test/testHelpers/datasets/datasetHelper.ts @@ -1,10 +1,8 @@ import { Dataset, DatasetVersionState, DatasetLicense } from '../../../src/datasets/domain/models/Dataset'; -import TurndownService from 'turndown'; +import { NodeHtmlMarkdown } from 'node-html-markdown'; import axios, { AxiosResponse } from 'axios'; import { TestConstants } from '../TestConstants'; -const turndownService = new TurndownService(); - const DATASET_CREATE_TIME_STR = '2023-05-15T08:21:01Z'; const DATASET_UPDATE_TIME_STR = '2023-05-15T08:21:03Z'; const DATASET_RELEASE_TIME_STR = '2023-05-15T08:21:03Z'; @@ -50,7 +48,7 @@ export const createDatasetModel = (license?: DatasetLicense): Dataset => { subject: ['Subject1', 'Subject2'], dsDescription: [ { - dsDescriptionValue: turndownService.turndown(DATASET_HTML_DESCRIPTION), + dsDescriptionValue: NodeHtmlMarkdown.translate(DATASET_HTML_DESCRIPTION), }, ], datasetContact: [ @@ -185,7 +183,7 @@ export const createDatasetVersionPayload = (license?: DatasetLicense): any => { return datasetPayload; }; -export const createDatasetLicenseModel = (withIconUri: boolean = true): DatasetLicense => { +export const createDatasetLicenseModel = (withIconUri = true): DatasetLicense => { const datasetLicense: DatasetLicense = { name: 'CC0 1.0', uri: 'https://creativecommons.org/publicdomain/zero/1.0/', diff --git 
a/test/testHelpers/datasets/newDatasetHelper.ts b/test/testHelpers/datasets/newDatasetHelper.ts new file mode 100644 index 00000000..4ff08439 --- /dev/null +++ b/test/testHelpers/datasets/newDatasetHelper.ts @@ -0,0 +1,312 @@ +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { MetadataBlock } from '../../../src'; +import { NewDatasetRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + +export const createNewDatasetDTO = ( + titleFieldValue?: NewDatasetMetadataFieldValueDTO, + authorFieldValue?: NewDatasetMetadataFieldValueDTO, + alternativeRequiredTitleValue?: NewDatasetMetadataFieldValueDTO, + timePeriodCoveredStartValue?: NewDatasetMetadataFieldValueDTO, + contributorTypeValue?: NewDatasetMetadataFieldValueDTO, +): NewDatasetDTO => { + const validTitle = 'test dataset'; + const validAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'Owner, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + ]; + const validAlternativeRequiredTitleValue = ['alternative1', 'alternative2']; + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: titleFieldValue !== undefined ? titleFieldValue : validTitle, + author: authorFieldValue !== undefined ? authorFieldValue : validAuthorFieldValue, + alternativeRequiredTitle: + alternativeRequiredTitleValue !== undefined + ? 
alternativeRequiredTitleValue + : validAlternativeRequiredTitleValue, + ...(timePeriodCoveredStartValue && { timePeriodCoveredStart: timePeriodCoveredStartValue }), + ...(contributorTypeValue && { + contributor: [ + { + contributorName: 'Admin, Dataverse', + contributorType: contributorTypeValue as string, + }, + ], + }), + }, + }, + ], + }; +}; + +export const createNewDatasetDTOWithoutFirstLevelRequiredField = (): NewDatasetDTO => { + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + }, + }, + ], + }; +}; + +export const createNewDatasetDTOWithoutSecondLevelRequiredField = (): NewDatasetDTO => { + return { + metadataBlockValues: [ + { + name: 'citation', + fields: { + title: 'test dataset', + author: [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ], + }, + }, + ], + }; +}; + +/** + * + * This method creates a simplified and altered version of the Citation Metadata Block, only for testing purposes. + * For this reason some of the metadata fields do not correspond to the real ones. + * + * @returns {MetadataBlock} A MetadataBlock testing instance. 
+ * + **/ +export const createNewDatasetMetadataBlockModel = (): MetadataBlock => { + return { + id: 1, + name: 'citation', + displayName: 'Citation Metadata', + metadataFields: { + title: { + name: 'title', + displayName: 'title', + title: 'title', + type: 'DatasetField', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', + }, + author: { + name: 'author', + displayName: 'author', + title: 'author', + type: 'NONE', + watermark: 'watermark', + description: 'description', + multiple: true, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 1, + typeClass: 'compound', + childMetadataFields: { + authorName: { + name: 'authorName', + displayName: 'author name', + title: 'author name', + type: 'TEXT', + watermark: 'watermark', + description: 'description', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 2, + typeClass: 'primitive', + }, + authorAffiliation: { + name: 'authorAffiliation', + displayName: 'author affiliation', + title: 'author affiliation', + type: 'TEXT', + watermark: 'watermark', + description: 'descriprion', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: false, + displayOrder: 3, + typeClass: 'primitive', + }, + }, + }, + alternativeRequiredTitle: { + name: 'alternativeRequiredTitle', + displayName: 'Alternative Required Title', + title: 'Alternative Title', + type: 'TEXT', + watermark: '', + description: 'Either 1) a title commonly used to refer to the Dataset or 2) an abbreviation of the main title', + multiple: true, + isControlledVocabulary: false, + displayFormat: '', + isRequired: true, + displayOrder: 4, + typeClass: 'primitive', + }, + timePeriodCoveredStart: { + name: 'timePeriodCoveredStart', + displayName: 'Time Period Start Date', + title: 
'Start Date', + type: 'DATE', + watermark: 'YYYY-MM-DD', + description: 'The start date of the time period that the data refer to', + multiple: false, + isControlledVocabulary: false, + displayFormat: '#NAME: #VALUE ', + isRequired: false, + displayOrder: 5, + typeClass: 'primitive', + }, + contributor: { + name: 'contributor', + displayName: 'Contributor', + title: 'Contributor', + type: 'NONE', + watermark: '', + description: + 'The entity, such as a person or organization, responsible for collecting, managing, or otherwise contributing to the development of the Dataset', + multiple: true, + isControlledVocabulary: false, + displayFormat: ':', + isRequired: false, + displayOrder: 6, + typeClass: 'compound', + childMetadataFields: { + contributorType: { + name: 'contributorType', + displayName: 'Contributor Type', + title: 'Type', + type: 'TEXT', + watermark: '', + description: 'Indicates the type of contribution made to the dataset', + multiple: false, + isControlledVocabulary: true, + displayFormat: '#VALUE ', + isRequired: false, + displayOrder: 7, + controlledVocabularyValues: [ + 'Data Collector', + 'Data Curator', + 'Data Manager', + 'Editor', + 'Funder', + 'Hosting Institution', + 'Project Leader', + 'Project Manager', + 'Project Member', + 'Related Person', + 'Researcher', + 'Research Group', + 'Rights Holder', + 'Sponsor', + 'Supervisor', + 'Work Package Leader', + 'Other', + ], + typeClass: 'controlledVocabulary', + }, + contributorName: { + name: 'contributorName', + displayName: 'Contributor Name', + title: 'Name', + type: 'TEXT', + watermark: '1) FamilyName, GivenName or 2) Organization', + description: "The name of the contributor, e.g. 
the person's name or the name of an organization", + multiple: false, + isControlledVocabulary: false, + displayFormat: '#VALUE', + isRequired: true, + displayOrder: 8, + typeClass: 'primitive', + }, + }, + }, + }, + }; +}; + +export const createNewDatasetRequestPayload = (): NewDatasetRequestPayload => { + return { + datasetVersion: { + metadataBlocks: { + citation: { + fields: [ + { + value: 'test dataset', + typeClass: 'primitive', + multiple: false, + typeName: 'title', + }, + { + value: [ + { + authorName: { + value: 'Admin, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + { + authorName: { + value: 'Owner, Dataverse', + typeClass: 'primitive', + multiple: false, + typeName: 'authorName', + }, + authorAffiliation: { + value: 'Dataverse.org', + typeClass: 'primitive', + multiple: false, + typeName: 'authorAffiliation', + }, + }, + ], + typeClass: 'compound', + multiple: true, + typeName: 'author', + }, + { + value: ['alternative1', 'alternative2'], + typeClass: 'primitive', + multiple: true, + typeName: 'alternativeRequiredTitle', + }, + ], + displayName: 'Citation Metadata', + }, + }, + }, + }; +}; diff --git a/test/testHelpers/datasets/test-dataset-3.json b/test/testHelpers/datasets/test-dataset-3.json index 34e7f8ab..4f867c90 100644 --- a/test/testHelpers/datasets/test-dataset-3.json +++ b/test/testHelpers/datasets/test-dataset-3.json @@ -60,7 +60,7 @@ "value": [ { "dsDescriptionValue": { - "value": "This is the description of the second dataset.", + "value": "This is the description of the third dataset.", "multiple": false, "typeClass": "primitive", "typeName": "dsDescriptionValue" diff --git a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts index 54b12cfe..28b013be 100644 --- 
a/test/testHelpers/metadataBlocks/metadataBlockHelper.ts +++ b/test/testHelpers/metadataBlocks/metadataBlockHelper.ts @@ -16,6 +16,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -27,6 +30,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, + displayOrder: 0, + typeClass: 'compound', childMetadataFields: { testField3: { name: 'testName3', @@ -38,6 +44,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -49,6 +58,9 @@ export const createMetadataBlockModel = (): MetadataBlock => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, }, }, @@ -72,6 +84,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField2: { name: 'testName2', @@ -83,6 +98,9 @@ export const createMetadataBlockPayload = (): any => { multiple: true, isControlledVocabulary: false, displayFormat: '', + isRequired: true, + displayOrder: 0, + typeClass: 'compound', childFields: { testField3: { name: 'testName3', @@ -94,6 +112,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, testField4: { name: 'testName4', @@ -105,6 +126,9 @@ export const createMetadataBlockPayload = (): any => { multiple: false, isControlledVocabulary: false, 
displayFormat: '#VALUE', + isRequired: true, + displayOrder: 0, + typeClass: 'primitive', }, }, }, diff --git a/test/unit/datasets/CreateDataset.test.ts b/test/unit/datasets/CreateDataset.test.ts new file mode 100644 index 00000000..f3f91388 --- /dev/null +++ b/test/unit/datasets/CreateDataset.test.ts @@ -0,0 +1,125 @@ +import { CreateDataset } from '../../../src/datasets/domain/useCases/CreateDataset'; +import { CreatedDatasetIdentifiers } from '../../../src/datasets/domain/models/CreatedDatasetIdentifiers'; +import { IDatasetsRepository } from '../../../src/datasets/domain/repositories/IDatasetsRepository'; +import { assert, createSandbox, SinonSandbox } from 'sinon'; +import { NewResourceValidator } from '../../../src/core/domain/useCases/validators/NewResourceValidator'; +import { createNewDatasetDTO, createNewDatasetMetadataBlockModel } from '../../testHelpers/datasets/newDatasetHelper'; +import { ResourceValidationError } from '../../../src/core/domain/useCases/validators/errors/ResourceValidationError'; +import { WriteError, ReadError } from '../../../src'; +import { IMetadataBlocksRepository } from '../../../src/metadataBlocks/domain/repositories/IMetadataBlocksRepository'; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + const testDataset = createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + + afterEach(() => { + sandbox.restore(); + }); + + test('should return new dataset identifiers when validation is successful and repository call is successful', async () => { + const testCreatedDatasetIdentifiers: CreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + + const datasetsRepositoryStub = {}; + const createDatasetStub = sandbox.stub().returns(testCreatedDatasetIdentifiers); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorStub = {}; + const validateStub = sandbox.stub().resolves(); + newDatasetValidatorStub.validate = 
validateStub; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); + + const actual = await sut.execute(testDataset); + + assert.match(actual, testCreatedDatasetIdentifiers); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); + + assert.callOrder(validateStub, createDatasetStub); + }); + + test('should throw ResourceValidationError and not call repository when validation is unsuccessful', async () => { + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorStub = {}; + const testValidationError = new ResourceValidationError('Test error'); + const validateStub = sandbox.stub().throwsException(testValidationError); + newDatasetValidatorStub.validate = validateStub; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorStub); + let actualError: ResourceValidationError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testValidationError); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + assert.calledWithExactly(validateStub, testDataset, testMetadataBlocks); + assert.notCalled(createDatasetMock); + }); + + test('should throw WriteError 
when validation is successful and repository raises an error', async () => { + const datasetsRepositoryStub = {}; + const testWriteError = new WriteError('Test error'); + const createDatasetStub = sandbox.stub().throwsException(testWriteError); + datasetsRepositoryStub.createDataset = createDatasetStub; + + const newDatasetValidatorStub = {}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorStub.validate = validateMock; + + const metadataBlocksRepositoryStub = {}; + const getMetadataBlockByNameStub = sandbox.stub().resolves(testMetadataBlocks[0]); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryStub, metadataBlocksRepositoryStub, newDatasetValidatorStub); + let actualError: WriteError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testWriteError); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + assert.calledWithExactly(validateMock, testDataset, testMetadataBlocks); + assert.calledWithExactly(createDatasetStub, testDataset, testMetadataBlocks, 'root'); + + assert.callOrder(validateMock, createDatasetStub); + }); + + test('should throw ReadError when metadata blocks repository raises an error', async () => { + const datasetsRepositoryMock = {}; + const createDatasetMock = sandbox.stub(); + datasetsRepositoryMock.createDataset = createDatasetMock; + + const newDatasetValidatorMock = {}; + const validateMock = sandbox.stub().resolves(); + newDatasetValidatorMock.validate = validateMock; + + const metadataBlocksRepositoryStub = {}; + const testReadError = new ReadError('Test error'); + const getMetadataBlockByNameStub = sandbox.stub().throwsException(testReadError); + metadataBlocksRepositoryStub.getMetadataBlockByName = getMetadataBlockByNameStub; + + const sut = new CreateDataset(datasetsRepositoryMock, metadataBlocksRepositoryStub, newDatasetValidatorMock); + 
let actualError: ReadError = undefined; + await sut.execute(testDataset).catch((e) => (actualError = e)); + assert.match(actualError, testReadError); + + assert.notCalled(validateMock); + assert.notCalled(createDatasetMock); + + assert.calledWithExactly(getMetadataBlockByNameStub, testMetadataBlocks[0].name); + }); +}); diff --git a/test/unit/datasets/DatasetsRepository.test.ts b/test/unit/datasets/DatasetsRepository.test.ts index 892d471a..bab7e0dd 100644 --- a/test/unit/datasets/DatasetsRepository.test.ts +++ b/test/unit/datasets/DatasetsRepository.test.ts @@ -17,6 +17,12 @@ import { createDatasetPreviewModel, createDatasetPreviewPayload, } from '../../testHelpers/datasets/datasetPreviewHelper'; +import { + createNewDatasetDTO, + createNewDatasetMetadataBlockModel, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { WriteError } from '../../../src'; describe('DatasetsRepository', () => { const sandbox: SinonSandbox = createSandbox(); @@ -87,11 +93,11 @@ describe('DatasetsRepository', () => { describe('getDataset', () => { const testIncludeDeaccessioned = false; const expectedRequestConfigApiKey = { - params: { includeDeaccessioned: testIncludeDeaccessioned, includeFiles: false }, + params: { includeDeaccessioned: testIncludeDeaccessioned, excludeFiles: true }, headers: TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY.headers, }; const expectedRequestConfigSessionCookie = { - params: { includeDeaccessioned: testIncludeDeaccessioned, includeFiles: false }, + params: { includeDeaccessioned: testIncludeDeaccessioned, excludeFiles: true }, withCredentials: TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE.withCredentials, headers: TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE.headers, }; @@ -635,4 +641,73 @@ describe('DatasetsRepository', () => { expect(error).to.be.instanceOf(Error); }); }); + + describe('createDataset', () => { + const testNewDataset = 
createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const testCollectionName = 'test'; + const expectedNewDatasetRequestPayloadJson = JSON.stringify(createNewDatasetRequestPayload()); + + const testCreatedDatasetIdentifiers = { + persistentId: 'test', + numericId: 1, + }; + + const testCreateDatasetResponse = { + data: { + status: 'OK', + data: { + id: testCreatedDatasetIdentifiers.numericId, + persistentId: testCreatedDatasetIdentifiers.persistentId, + }, + }, + }; + + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/dataverses/${testCollectionName}/datasets`; + + test('should call the API with a correct request payload', async () => { + const axiosPostStub = sandbox.stub(axios, 'post').resolves(testCreateDatasetResponse); + + // API Key auth + let actual = await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + + assert.match(actual, testCreatedDatasetIdentifiers); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE, + ); + + assert.match(actual, testCreatedDatasetIdentifiers); + }); + + test('should return error result on error response', async () => { + const axiosPostStub = sandbox.stub(axios, 'post').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: WriteError = undefined; + await sut.createDataset(testNewDataset, testMetadataBlocks, testCollectionName).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosPostStub, + expectedApiEndpoint, + 
expectedNewDatasetRequestPayloadJson, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY, + ); + expect(error).to.be.instanceOf(Error); + }); + }); }); diff --git a/test/unit/datasets/NewDatasetResourceValidator.test.ts b/test/unit/datasets/NewDatasetResourceValidator.test.ts new file mode 100644 index 00000000..3c0e1c59 --- /dev/null +++ b/test/unit/datasets/NewDatasetResourceValidator.test.ts @@ -0,0 +1,196 @@ +import { NewDatasetResourceValidator } from '../../../src/datasets/domain/useCases/validators/NewDatasetResourceValidator'; +import { assert } from 'sinon'; +import { + createNewDatasetDTO, + createNewDatasetMetadataBlockModel, + createNewDatasetDTOWithoutFirstLevelRequiredField, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { fail } from 'assert'; +import { EmptyFieldError } from '../../../src/datasets/domain/useCases/validators/errors/EmptyFieldError'; +import { FieldValidationError } from '../../../src/datasets/domain/useCases/validators/errors/FieldValidationError'; +import { NewDatasetDTO, NewDatasetMetadataFieldValueDTO } from '../../../src/datasets/domain/dtos/NewDatasetDTO'; +import { SingleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/SingleMetadataFieldValidator'; +import { MetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/MetadataFieldValidator'; +import { MultipleMetadataFieldValidator } from '../../../src/datasets/domain/useCases/validators/MultipleMetadataFieldValidator'; + +describe('validate', () => { + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + + const singleMetadataFieldValidator = new SingleMetadataFieldValidator(); + const metadataFieldValidator = new MetadataFieldValidator( + new SingleMetadataFieldValidator(), + new MultipleMetadataFieldValidator(singleMetadataFieldValidator), + ); + const sut = new NewDatasetResourceValidator(metadataFieldValidator); + + async function runValidateExpectingFieldValidationError<T extends FieldValidationError>( 
newDataset: NewDatasetDTO, + expectedMetadataFieldName: string, + expectedErrorMessage: string, + expectedParentMetadataFieldName?: string, + expectedPosition?: number, + ): Promise<void> { + await sut + .validate(newDataset, testMetadataBlocks) + .then(() => { + fail('Validation should fail'); + }) + .catch((error) => { + const fieldValidationError = error as T; + assert.match(fieldValidationError.citationBlockName, 'citation'); + assert.match(fieldValidationError.metadataFieldName, expectedMetadataFieldName); + assert.match(fieldValidationError.parentMetadataFieldName, expectedParentMetadataFieldName); + assert.match(fieldValidationError.fieldPosition, expectedPosition); + assert.match(fieldValidationError.message, expectedErrorMessage); + }); + } + + test('should not raise a validation error when a new dataset with only the required fields is valid', async () => { + const testNewDataset = createNewDatasetDTO(); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise an empty field error when a first level required string field is missing', async () => { + await runValidateExpectingFieldValidationError( + createNewDatasetDTOWithoutFirstLevelRequiredField(), + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: The field should not be empty.', + ); + }); + + test('should raise an empty field error when a first level required array field is empty', async () => { + const invalidAuthorFieldValue: NewDatasetMetadataFieldValueDTO = []; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. 
Reason was: The field should not be empty.', + ); + }); + + test('should raise an error when the provided field value for an unique field is an array', async () => { + const invalidTitleFieldValue = ['title1', 'title2']; + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a single field, not an array.', + ); + }); + + test('should raise an error when the provided field value is an object and the field expects a string', async () => { + const invalidTitleFieldValue = { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }; + const testNewDataset = createNewDatasetDTO(invalidTitleFieldValue, undefined, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'title', + 'There was an error when validating the field title from metadata block citation. Reason was: Expecting a string, not child fields.', + ); + }); + + test('should raise an error when the provided field value for a multiple field is a string', async () => { + const invalidAuthorFieldValue = 'invalidValue'; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. 
Reason was: Expecting an array of values.', + ); + }); + + test('should raise an error when the provided field value is an array of strings and the field expects an array of objects', async () => { + const invalidAuthorFieldValue = ['invalidValue1', 'invalidValue2']; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'author', + 'There was an error when validating the field author from metadata block citation. Reason was: Expecting an array of child fields, not strings', + ); + }); + + test('should raise an error when the provided field value is an array of objects and the field expects an array of strings', async () => { + const invalidAlternativeTitleFieldValue = [ + { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }, + { + invalidChildField1: 'invalid value 1', + invalidChildField2: 'invalid value 2', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, undefined, invalidAlternativeTitleFieldValue); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'alternativeRequiredTitle', + 'There was an error when validating the field alternativeRequiredTitle from metadata block citation. Reason was: Expecting an array of strings, not child fields', + ); + }); + + test('should raise an empty field error when a required child field is missing', async () => { + const invalidAuthorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorAffiliation: 'Dataverse.org', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, invalidAuthorFieldValue, undefined); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'authorName', + 'There was an error when validating the field authorName from metadata block citation with parent field author in position 1. 
Reason was: The field should not be empty.', + 'author', + 1, + ); + }); + + test('should not raise an empty field error when a not required child field is missing', async () => { + const authorFieldValue = [ + { + authorName: 'Admin, Dataverse', + authorAffiliation: 'Dataverse.org', + }, + { + authorName: 'John, Doe', + }, + ]; + const testNewDataset = createNewDatasetDTO(undefined, authorFieldValue, undefined); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise a date format validation error when a date field has an invalid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '1-1-2020'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'timePeriodCoveredStart', + 'There was an error when validating the field timePeriodCoveredStart from metadata block citation. Reason was: The field requires a valid date format (YYYY-MM-DD).', + ); + }); + + test('should not raise a date format validation error when a date field has a valid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, '2020-01-01'); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); + + test('should raise a controlled vocabulary error when a controlled vocabulary field has an invalid format', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Wrong Value'); + await runValidateExpectingFieldValidationError( + testNewDataset, + 'contributorType', + 'There was an error when validating the field contributorType from metadata block citation with parent field contributor. 
Reason was: The field does not have a valid controlled vocabulary value.', + 'contributor', + 0, + ); + }); + + test('should not raise a controlled vocabulary error when the value for a controlled vocabulary field is correct', async () => { + const testNewDataset = createNewDatasetDTO(undefined, undefined, undefined, undefined, 'Project Member'); + await sut.validate(testNewDataset, testMetadataBlocks).catch((e) => fail(e)); + }); +}); diff --git a/test/unit/datasets/newDatasetTransformers.test.ts b/test/unit/datasets/newDatasetTransformers.test.ts new file mode 100644 index 00000000..93e19d9b --- /dev/null +++ b/test/unit/datasets/newDatasetTransformers.test.ts @@ -0,0 +1,17 @@ +import { assert } from 'sinon'; +import { + createNewDatasetMetadataBlockModel, + createNewDatasetDTO, + createNewDatasetRequestPayload, +} from '../../testHelpers/datasets/newDatasetHelper'; +import { transformNewDatasetModelToRequestPayload } from '../../../src/datasets/infra/repositories/transformers/newDatasetTransformers'; + +describe('transformNewDatasetModelToRequestPayload', () => { + test('should correctly transform a new dataset model to a new dataset request payload', async () => { + const testNewDataset = createNewDatasetDTO(); + const testMetadataBlocks = [createNewDatasetMetadataBlockModel()]; + const expectedNewDatasetRequestPayload = createNewDatasetRequestPayload(); + const actual = transformNewDatasetModelToRequestPayload(testNewDataset, testMetadataBlocks); + assert.match(actual, expectedNewDatasetRequestPayload); + }); +}); diff --git a/test/unit/files/FilesRepository.test.ts b/test/unit/files/FilesRepository.test.ts index 4348d1d6..38cd4d92 100644 --- a/test/unit/files/FilesRepository.test.ts +++ b/test/unit/files/FilesRepository.test.ts @@ -874,4 +874,57 @@ describe('FilesRepository', () => { }); }); }); + + describe('getFileCitation', () => { + const testIncludeDeaccessioned = true; + const testCitation = 'test citation'; + const testCitationSuccessfulResponse = { + 
data: { + status: 'OK', + data: { + message: testCitation, + }, + }, + }; + test('should return citation when response is successful', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').resolves(testCitationSuccessfulResponse); + const expectedApiEndpoint = `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`; + + // API Key auth + let actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + ); + assert.match(actual, testCitation); + + // Session cookie auth + ApiConfig.init(TestConstants.TEST_API_URL, DataverseApiAuthMechanism.SESSION_COOKIE); + + actual = await sut.getFileCitation(testFile.id, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned); + + assert.calledWithExactly( + axiosGetStub, + expectedApiEndpoint, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_SESSION_COOKIE_INCLUDE_DEACCESSIONED, + ); + assert.match(actual, testCitation); + }); + + test('should return error on repository read error', async () => { + const axiosGetStub = sandbox.stub(axios, 'get').rejects(TestConstants.TEST_ERROR_RESPONSE); + + let error: ReadError = undefined; + await sut.getFileCitation(1, DatasetNotNumberedVersion.LATEST, testIncludeDeaccessioned).catch((e) => (error = e)); + + assert.calledWithExactly( + axiosGetStub, + `${TestConstants.TEST_API_URL}/files/${testFile.id}/versions/${DatasetNotNumberedVersion.LATEST}/citation`, + TestConstants.TEST_EXPECTED_AUTHENTICATED_REQUEST_CONFIG_API_KEY_INCLUDE_DEACCESSIONED, + ); + expect(error).to.be.instanceOf(Error); + }); + }); }); diff --git a/test/unit/files/GetFileCitation.test.ts b/test/unit/files/GetFileCitation.test.ts new file mode 100644 index 00000000..ac40b547 --- /dev/null +++ b/test/unit/files/GetFileCitation.test.ts @@ -0,0 
+1,39 @@ +import {assert, createSandbox, SinonSandbox} from "sinon"; +import {DatasetNotNumberedVersion, ReadError} from "../../../src"; +import {IFilesRepository} from "../../../src/files/domain/repositories/IFilesRepository"; +import {GetFileCitation} from "../../../src/files/domain/useCases/GetFileCitation"; + +describe('execute', () => { + const sandbox: SinonSandbox = createSandbox(); + const testId = 1; + + afterEach(() => { + sandbox.restore(); + }); + + test('should return successful result with file citation on repository success', async () => { + const testCitation = 'test citation'; + const filesRepositoryStub = {}; + const getFileCitation = sandbox.stub().returns(testCitation); + filesRepositoryStub.getFileCitation = getFileCitation; + + const sut = new GetFileCitation(filesRepositoryStub); + + const actual = await sut.execute(testId); + + assert.match(actual, testCitation); + assert.calledWithExactly(getFileCitation, testId, DatasetNotNumberedVersion.LATEST, false); + }); + + test('should return error result on repository error', async () => { + const filesRepositoryStub = {}; + const testReadError = new ReadError(); + filesRepositoryStub.getFileCitation = sandbox.stub().throwsException(testReadError); + const sut = new GetFileCitation(filesRepositoryStub); + + let actualError: ReadError = undefined; + await sut.execute(testId).catch((e) => (actualError = e)); + + assert.match(actualError, testReadError); + }); +}) \ No newline at end of file