diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index b297dfc4ee8..7e6995d76d9 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -3,7 +3,7 @@ name: Bug report about: Did you encounter something unexpected or incorrect in the Dataverse software? We'd like to hear about it! title: '' -labels: '' +labels: 'Type: Bug' assignees: '' --- diff --git a/README.md b/README.md index 01de5b2c854..2333787d4b9 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ We love contributors! Please see our [Contributing Guide][] for ways you can hel Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States. -[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg?raw=true "Dataverse Project")](http://dataverse.org) +[![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg "Dataverse Project")](http://dataverse.org) [![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) [![API Test Coverage](https://img.shields.io/jenkins/coverage/jacoco?jobUrl=https%3A%2F%2Fjenkins.dataverse.org%2Fjob%2FIQSS-dataverse-develop&label=API%20Test%20Coverage)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/ws/target/coverage-it/index.html) diff --git a/doc/release-notes/10001-datasets-files-api-user-permissions.md b/doc/release-notes/10001-datasets-files-api-user-permissions.md new file mode 100644 index 00000000000..0aa75f9218a --- /dev/null +++ b/doc/release-notes/10001-datasets-files-api-user-permissions.md @@ -0,0 +1,13 @@ +- New query parameter `includeDeaccessioned` added to the getVersion endpoint (/api/datasets/{id}/versions/{versionId}) to consider deaccessioned versions when searching for versions. + + +- New endpoint to get user permissions on a dataset (/api/datasets/{id}/userPermissions). In particular, the user permissions that this API call checks, returned as booleans, are the following: + + - Can view the unpublished dataset + - Can edit the dataset + - Can publish the dataset + - Can manage the dataset permissions + - Can delete the dataset draft + + +- New permission check "canManageFilePermissions" added to the existing endpoint for getting user permissions on a file (/api/access/datafile/{id}/userPermissions). \ No newline at end of file diff --git a/doc/release-notes/9412-markdown-previewer.md b/doc/release-notes/9412-markdown-previewer.md new file mode 100644 index 00000000000..8faa2679fb0 --- /dev/null +++ b/doc/release-notes/9412-markdown-previewer.md @@ -0,0 +1 @@ +There is now a Markdown (.md) previewer: https://dataverse-guide--9986.org.readthedocs.build/en/9986/user/dataset-management.html#file-previews diff --git a/doc/release-notes/9714-files-api-extension-filters.md b/doc/release-notes/9714-files-api-extension-filters.md new file mode 100644 index 00000000000..034230efe61 --- /dev/null +++ b/doc/release-notes/9714-files-api-extension-filters.md @@ -0,0 +1,14 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by: + +- Access status: through the `accessStatus` query parameter, which supports the following values: + + - Public + - Restricted + - EmbargoedThenRestricted + - EmbargoedThenPublic + + +- Category name: through the `categoryName` query parameter. 
To return files to which the particular category has been added. + + +- Content type: through the `contentType` query parameter. To return files matching the requested content type. For example: "image/png". diff --git a/doc/release-notes/9763-versions-api-improvements.md b/doc/release-notes/9763-versions-api-improvements.md new file mode 100644 index 00000000000..8d7f6c7a20a --- /dev/null +++ b/doc/release-notes/9763-versions-api-improvements.md @@ -0,0 +1,8 @@ +# Improvements in the /versions API + +- optional pagination has been added to `/api/datasets/{id}/versions` that may be useful in datasets with a large number of versions; +- a new flag `includeFiles` has been added to both `/api/datasets/{id}/versions` and `/api/datasets/{id}/versions/{vid}` (true by default), providing an option to drop the file information from the output; +- when files are requested to be included, some database lookup optimizations have been added to improve the performance on datasets with large numbers of files. + +This is reflected in the [Dataset Versions API](https://guides.dataverse.org/en/9763-lookup-optimizations/api/native-api.html#dataset-versions-api) section of the Guide. + diff --git a/doc/release-notes/9785-files-api-extension-search-text.md b/doc/release-notes/9785-files-api-extension-search-text.md new file mode 100644 index 00000000000..fb185e1c7af --- /dev/null +++ b/doc/release-notes/9785-files-api-extension-search-text.md @@ -0,0 +1,3 @@ +The getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) has been extended to support optional filtering by search text through the `searchText` query parameter. + +The search will be applied to the labels and descriptions of the dataset files. diff --git a/doc/release-notes/9834-files-api-extension-counts.md b/doc/release-notes/9834-files-api-extension-counts.md new file mode 100644 index 00000000000..3ec15d8bd36 --- /dev/null +++ b/doc/release-notes/9834-files-api-extension-counts.md @@ -0,0 +1,6 @@ +Implemented the following new endpoints: + +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts): Given a dataset and its version, retrieves file counts based on different criteria (Total count, per content type, per access status and per category name). + + +- setFileCategories (/api/files/{id}/metadata/categories): Updates the categories (by name) for an existing file. If the specified categories do not exist, they will be created. diff --git a/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md new file mode 100644 index 00000000000..f306ae2ab80 --- /dev/null +++ b/doc/release-notes/9851-datafile-payload-extension-new-file-access-endpoints.md @@ -0,0 +1,14 @@ +Implemented the following new endpoints: + +- userFileAccessRequested (/api/access/datafile/{id}/userFileAccessRequested): Returns true or false depending on whether or not the calling user has requested access to a particular file. + + +- hasBeenDeleted (/api/files/{id}/hasBeenDeleted): Returns whether a particular file that existed in a previous version of the dataset no longer exists in the latest version (see the example below).
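+
+For example, a minimal sketch of calling the two new endpoints with curl (illustrative only; it assumes a file with database id 24 and the `$SERVER_URL` and `$API_TOKEN` placeholders used throughout the guides):
+
+```shell
+# Has the calling user requested access to file 24?
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/access/datafile/24/userFileAccessRequested"
+
+# Did file 24, present in an earlier version, disappear from the latest version?
+curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/files/24/hasBeenDeleted"
+```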
+ + +In addition, the DataFile API payload has been extended to include the following fields: + +- tabularData: Boolean field indicating whether the DataFile is of tabular type + + +- fileAccessRequest: Boolean field indicating whether file access requests are enabled on the Dataset (the DataFile owner) diff --git a/doc/release-notes/9852-files-api-extension-deaccession.md b/doc/release-notes/9852-files-api-extension-deaccession.md new file mode 100644 index 00000000000..55698580e3c --- /dev/null +++ b/doc/release-notes/9852-files-api-extension-deaccession.md @@ -0,0 +1,12 @@ +Extended the existing endpoints: + +- getVersionFiles (/api/datasets/{id}/versions/{versionId}/files) +- getVersionFileCounts (/api/datasets/{id}/versions/{versionId}/files/counts) + +The above endpoints now accept a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned versions when searching for versions to obtain files or file counts. + +Additionally, a new endpoint has been developed to support version deaccessioning through the API (given a dataset and a version). + +- deaccessionDataset (/api/datasets/{id}/versions/{versionId}/deaccession) + +Finally, the DataFile API payload has been extended to add the field "friendlyType". diff --git a/doc/release-notes/9907-files-api-counts-with-criteria.md b/doc/release-notes/9907-files-api-counts-with-criteria.md new file mode 100644 index 00000000000..07cd23daad0 --- /dev/null +++ b/doc/release-notes/9907-files-api-counts-with-criteria.md @@ -0,0 +1,11 @@ +Extended the getVersionFileCounts endpoint (/api/datasets/{id}/versions/{versionId}/files/counts) to support filtering by criteria. + +In particular, the endpoint now accepts the following optional criteria query parameters: + +- contentType +- accessStatus +- categoryName +- tabularTagName +- searchText + +These filtering criteria are the same as those for the getVersionFiles endpoint. diff --git a/doc/release-notes/9955-Signposting-updates.md b/doc/release-notes/9955-Signposting-updates.md new file mode 100644 index 00000000000..db0e27e51c5 --- /dev/null +++ b/doc/release-notes/9955-Signposting-updates.md @@ -0,0 +1,7 @@ +This release fixes several issues (#9952, #9953, #9957) where the Signposting output did not match the Signposting specification. These changes introduce backward incompatibility, but since Signposting support was added recently (in Dataverse 5.14 in PR #8981), we feel it's best to do this cleanup and not support the old implementation that was not fully compliant with the spec. + +To fix #9952, we surround the license info with `<` and `>`. + +To fix #9953, we no longer wrap the response in a `{"status":"OK","data":{` JSON object. This has also been noted in the guides at https://dataverse-guide--9955.org.readthedocs.build/en/9955/api/native-api.html#retrieve-signposting-information + +To fix #9957, we corrected the mime/content type, changing it from `json+ld` to `ld+json`. For backward compatibility, we are still supporting the old one, for now. diff --git a/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md new file mode 100644 index 00000000000..2ede679b361 --- /dev/null +++ b/doc/release-notes/9958-dataset-api-downloadsize-ignore-tabular-size.md @@ -0,0 +1,9 @@ +Added a new optional query parameter "mode" to the "getDownloadSize" API endpoint ("api/datasets/{identifier}/versions/{versionId}/downloadsize").
+ +This parameter applies filter criteria to the operation and supports the following values: + +- All (Default): Includes both archival and original sizes for tabular files + +- Archival: Includes only the archival size for tabular files + +- Original: Includes only the original size for tabular files diff --git a/doc/release-notes/9972-files-api-filter-by-tabular-tags.md b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md new file mode 100644 index 00000000000..9c3fced1741 --- /dev/null +++ b/doc/release-notes/9972-files-api-filter-by-tabular-tags.md @@ -0,0 +1,3 @@ +- New query parameter `tabularTagName` added to the getVersionFiles endpoint (/api/datasets/{id}/versions/{versionId}/files) to return files to which the particular tabular tag has been added. + +- New endpoint to set tabular file tags via API: /api/files/{id}/metadata/tabularTags. diff --git a/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md new file mode 100644 index 00000000000..020224b2094 --- /dev/null +++ b/doc/release-notes/9995-files-api-downloadsize-with-criteria-and-deaccessioned-support.md @@ -0,0 +1,12 @@ +Extended the getDownloadSize endpoint (/api/datasets/{id}/versions/{versionId}/downloadsize), including the following new features: + +- The endpoint now accepts a new boolean optional query parameter "includeDeaccessioned", which, if enabled, causes the endpoint to consider deaccessioned dataset versions when searching for versions to obtain the total file download size. + + +- The endpoint now supports filtering by criteria. In particular, it accepts the following optional criteria query parameters: + + - contentType + - accessStatus + - categoryName + - tabularTagName + - searchText diff --git a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv index 8543300dd2c..4f4c29d0670 100644 --- a/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv +++ b/doc/sphinx-guides/source/_static/admin/dataverse-external-tools.tsv @@ -2,6 +2,6 @@ Tool Type Scope Description Data Explorer explore file "A GUI which lists the variables in a tabular data file allowing searching, charting and cross tabulation analysis. See the README.md file at https://github.com/scholarsportal/dataverse-data-explorer-v2 for the instructions on adding Data Explorer to your Dataverse." Whole Tale explore dataset "A platform for the creation of reproducible research packages that allows users to launch containerized interactive analysis environments based on popular tools such as Jupyter and RStudio. Using this integration, Dataverse users can launch Jupyter and RStudio environments to analyze published datasets. For more information, see the `Whole Tale User Guide `_." Binder explore dataset Binder allows you to spin up custom computing environments in the cloud (including Jupyter notebooks) with the files from your dataset. `Installation instructions `_ are in the Data Exploration Lab girder_ythub project. -File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file.
The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreasdheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" +File Previewers explore file "A set of tools that display the content of files - including audio, html, `Hypothes.is `_ annotations, images, PDF, Markdown, text, video, tabular data, spreadsheets, GeoJSON, zip, and NcML files - allowing them to be viewed without downloading the file. The previewers can be run directly from github.io, so the only required step is using the Dataverse API to register the ones you want to use. Documentation, including how to optionally brand the previewers, and an invitation to contribute through github are in the README.md file. Initial development was led by the Qualitative Data Repository and the spreadsheet previewer was added by the Social Sciences and Humanities Open Cloud (SSHOC) project. https://github.com/gdcc/dataverse-previewers" Data Curation Tool configure file "A GUI for curating data by adding labels, groups, weights and other details to assist with informed reuse. See the README.md file at https://github.com/scholarsportal/Dataverse-Data-Curation-Tool for the installation instructions." Ask the Data query file Ask the Data is an experimental tool that allows you ask natural language questions about the data contained in Dataverse tables (tabular data). See the README.md file at https://github.com/IQSS/askdataverse/tree/main/askthedata for the instructions on adding Ask the Data to your Dataverse installation. diff --git a/doc/sphinx-guides/source/api/dataaccess.rst b/doc/sphinx-guides/source/api/dataaccess.rst index 21be702d62b..f7aaa8f4ee4 100755 --- a/doc/sphinx-guides/source/api/dataaccess.rst +++ b/doc/sphinx-guides/source/api/dataaccess.rst @@ -404,6 +404,18 @@ A curl example using an ``id``:: curl -H "X-Dataverse-key:$API_TOKEN" -X GET http://$SERVER/api/access/datafile/{id}/listRequests +User Has Requested Access to a File: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``/api/access/datafile/{id}/userFileAccessRequested`` + +This method returns true or false depending on whether or not the calling user has requested access to a particular file. + +A curl example using an ``id``:: + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "http://$SERVER/api/access/datafile/{id}/userFileAccessRequested" + + Get User Permissions on a File: +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -414,6 +426,7 @@ This method returns the permissions that the calling user has on a particular fi In particular, the user permissions that this method checks, returned as booleans, are the following: * Can download the file +* Can manage the file permissions * Can edit the file owner dataset A curl example using an ``id``:: diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index 060cd829cdf..1992390410c 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -889,6 +889,10 @@ It returns a list of versions with their metadata, and file list: ] } +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output.
It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. + +The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. + Get Version of a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -901,13 +905,26 @@ Get Version of a Dataset export ID=24 export VERSION=1.0 - curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION" + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION?includeFiles=false" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/datasets/24/versions/1.0" + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeFiles=false" + +The optional ``includeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. + + +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?includeDeaccessioned=true" .. _export-dataset-metadata-api: @@ -964,12 +981,59 @@ The fully expanded example above (without environment variables) looks like this curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files" -This endpoint supports optional pagination, through the ``limit`` and ``offset`` query params: +This endpoint supports optional pagination, through the ``limit`` and ``offset`` query parameters: .. code-block:: bash curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?limit=10&offset=20" +Category name filtering is also optionally supported. To return files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return files matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return the files that contain the text searched in one of such fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?searchText=word" + +File access filtering is also optionally supported. 
In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?accessStatus=Public" + Ordering criteria for sorting the results is also optionally supported. In particular, by the following possible values: * ``NameAZ`` (Default) @@ -979,14 +1043,114 @@ Ordering criteria for sorting the results is also optionally supported. In parti * ``Size`` * ``Type`` -Please note that these values are case sensitive and must be correctly typed for the endpoint to recognize them. - Usage example: .. code-block:: bash curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?orderCriteria=Newest" +Please note that both filtering and ordering criteria values are case sensitive and must be correctly typed for the endpoint to recognize them. + +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files?includeDeaccessioned=true" + +.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for. + +Get File Counts in a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Get file counts, for the given dataset and version. + +The returned file counts are based on different criteria: + +- Total (The total file count) +- Per content type +- Per category name +- Per tabular tag name +- Per access status (Possible values: Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic) + +.. code-block:: bash + + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSION=1.0 + + curl "$SERVER_URL/api/datasets/$ID/versions/$VERSION/files/counts" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts" + +Category name filtering is optionally supported. To return counts only for files to which the requested category has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return counts only for files to which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return counts only for files matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return counts only for files that contain the text searched in one of such fields. 
+ +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?accessStatus=Public" + +By default, deaccessioned dataset versions are not supported by this endpoint and will be ignored in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a not found error if the version is deaccessioned and you do not enable the option described below. + +If you want to include deaccessioned dataset versions, you must specify this through the ``includeDeaccessioned`` query parameter. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/files/counts?includeDeaccessioned=true" + +Please note that filtering values are case sensitive and must be correctly typed for the endpoint to recognize them. + +Keep in mind that you can combine all of the above query parameters depending on the results you are looking for. + View Dataset Files and Folders as a Directory Index ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1283,6 +1447,39 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/datasets/24/versions/:draft" +Deaccession Dataset +~~~~~~~~~~~~~~~~~~~ + +Given a version of a dataset, updates its status to deaccessioned. + +The JSON body required to deaccession a dataset (``deaccession.json``) looks like this:: + + { + "deaccessionReason": "Description of the deaccession reason.", + "deaccessionForwardURL": "https://demo.dataverse.org" + } + + +Note that the field ``deaccessionForwardURL`` is optional. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export VERSIONID=1.0 + export FILE_PATH=deaccession.json + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/datasets/$ID/versions/$VERSIONID/deaccession" -H "Content-type:application/json" --upload-file $FILE_PATH + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST "https://demo.dataverse.org/api/datasets/24/versions/1.0/deaccession" -H "Content-type:application/json" --upload-file deaccession.json + +.. note:: You cannot deaccession a dataset more than once. If you call this endpoint twice for the same dataset version, you will get a not found error on the second call, since the dataset you are looking for will no longer be published since it is already deaccessioned. + Set Citation Date Field Type for a Dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1696,6 +1893,73 @@ The fully expanded example above (without environment variables) looks like this The size of all files available for download will be returned. If :draft is passed as versionId the token supplied must have permission to view unpublished drafts. A token is not required for published datasets. 
Also restricted files will be included in this total regardless of whether the user has access to download the restricted file(s). +There is an optional query parameter ``mode`` which applies filter criteria to the operation. This parameter supports the following values: + +* ``All`` (Default): Includes both archival and original sizes for tabular files +* ``Archival``: Includes only the archival size for tabular files +* ``Original``: Includes only the original size for tabular files + +Usage example: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival" + +Category name filtering is also optionally supported. To return the size of all files available for download matching the requested category name. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?categoryName=Data" + +Tabular tag name filtering is also optionally supported. To return the size of all files available for download for which the requested tabular tag has been added. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?tabularTagName=Survey" + +Content type filtering is also optionally supported. To return the size of all files available for download matching the requested content type. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?contentType=image/png" + +Filtering by search text is also optionally supported. The search will be applied to the labels and descriptions of the dataset files, to return the size of all files available for download that contain the text searched in one of such fields. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?searchText=word" + +File access filtering is also optionally supported. In particular, by the following possible values: + +* ``Public`` +* ``Restricted`` +* ``EmbargoedThenRestricted`` +* ``EmbargoedThenPublic`` + +If no filter is specified, the files will match all of the above categories. + +Please note that filtering query parameters are case sensitive and must be correctly typed for the endpoint to recognize them. + +By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. + +If you want to include deaccessioned dataset versions, you must set ``includeDeaccessioned`` query parameter to ``true``. + +Usage example: + +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?includeDeaccessioned=true" + +.. note:: Keep in mind that you can combine all of the above query parameters depending on the results you are looking for.
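+
+As a sketch of such a combination (illustrative values only; any of the parameters documented above can be mixed the same way):
+
+.. code-block:: bash
+
+    curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/datasets/24/versions/1.0/downloadsize?mode=Archival&accessStatus=Public&includeDeaccessioned=true"
+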
+ Submit a Dataset for Review ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2202,11 +2466,11 @@ Signposting involves the addition of a `Link ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/json+ld", ;rel="type",;rel="type", https://demo.dataverse.org/api/datasets/:persistentId/versions/1.0/customlicense?persistentId=doi:10.5072/FK2/YD5QDG;rel="license", ; rel="linkset";type="application/linkset+json"`` +``Link: ;rel="cite-as", ;rel="describedby";type="application/vnd.citationstyles.csl+json",;rel="describedby";type="application/ld+json", ;rel="type",;rel="type", ;rel="license", ; rel="linkset";type="application/linkset+json"`` The URL for linkset information is discoverable under the ``rel="linkset";type="application/linkset+json`` entry in the "Link" header, such as in the example above. -The reponse includes a JSON object conforming to the `Signposting `__ specification. +The response includes a JSON object conforming to the `Signposting `__ specification. As part of this conformance, unlike most Dataverse API responses, the output is not wrapped in a ``{"status":"OK","data":{`` object. Signposting is not supported for draft dataset versions. .. code-block:: bash @@ -2304,6 +2568,26 @@ The API can also be used to reset the dataset to use the default/inherited value curl -X DELETE -H "X-Dataverse-key:$API_TOKEN" -H Content-type:application/json "$SERVER_URL/api/datasets/:persistentId/guestbookEntryAtRequest?persistentId=$PERSISTENT_IDENTIFIER" +Get User Permissions on a Dataset +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +This API call returns the permissions that the calling user has on a particular dataset. + +In particular, the user permissions that this API call checks, returned as booleans, are the following: + +* Can view the unpublished dataset +* Can edit the dataset +* Can publish the dataset +* Can manage the dataset permissions +* Can delete the dataset draft + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key: $API_TOKEN" -X GET "$SERVER_URL/api/datasets/$ID/userPermissions" Files @@ -2832,13 +3116,13 @@ A curl example using an ``ID`` export SERVER_URL=https://demo.dataverse.org export ID=24 - curl "$SERVER_URL/api/files/$ID/downloadCount" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/downloadCount" The fully expanded example above (without environment variables) looks like this: .. code-block:: bash - curl "https://demo.dataverse.org/api/files/24/downloadCount" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/downloadCount" A curl example using a ``PERSISTENT_ID`` .. code-block:: bash export SERVER_URL=https://demo.dataverse.org export PERSISTENT_ID=doi:10.5072/FK2/AAA000 - curl "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/downloadCount?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/downloadCount?persistentId=doi:10.5072/FK2/AAA000" If you are interested in download counts for multiple files, see :doc:`/api/metrics`. +File Has Been Deleted +~~~~~~~~~~~~~~~~~~~~~ + +Know if a particular file that existed in a previous version of the dataset no longer exists in the latest version. + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/$ID/hasBeenDeleted" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/24/hasBeenDeleted" + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + + curl -H "X-Dataverse-key:$API_TOKEN" -X GET "$SERVER_URL/api/files/:persistentId/hasBeenDeleted?persistentId=$PERSISTENT_ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X GET "https://demo.dataverse.org/api/files/:persistentId/hasBeenDeleted?persistentId=doi:10.5072/FK2/AAA000" + Updating File Metadata ~~~~~~~~~~~~~~~~~~~~~~ @@ -2872,7 +3193,7 @@ A curl example using an ``ID`` export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/$ID/metadata" The fully expanded example above (without environment variables) looks like this: @@ -2880,7 +3201,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "https://demo.dataverse.org/api/files/24/metadata" A curl example using a ``PERSISTENT_ID`` @@ -2892,7 +3213,7 @@ A curl example using a ``PERSISTENT_ID`` export PERSISTENT_ID=doi:10.5072/FK2/AAA000 curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "$SERVER_URL/api/files/:persistentId/metadata?persistentId=$PERSISTENT_ID" The fully expanded example above (without environment variables) looks like this: @@ -2900,13 +3221,141 @@ The fully expanded example above (without environment variables) looks like this .. 
code-block:: bash curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ - -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"restrict":false}' \ + -F 'jsonData={"description":"My description bbb.","provFreeform":"Test prov freeform","categories":["Data"],"dataFileTags":["Survey"],"restrict":false}' \ "https://demo.dataverse.org/api/files/:persistentId/metadata?persistentId=doi:10.5072/FK2/AAA000" +Note: To update the 'tabularTags' property of file metadata, use the 'dataFileTags' key when making API requests. This property is used to update the 'tabularTags' of the file metadata. + Also note that dataFileTags are not versioned and changes to these will update the published version of the file. .. _EditingVariableMetadata: +Updating File Metadata Categories +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates the categories for an existing file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the category names. + +Although updating categories can also be done with the previous endpoint, this has been created to be more practical when it is only necessary to update categories and not other metadata fields. + +The JSON representation of file categories (``categories.json``) looks like this:: + + { + "categories": [ + "Data", + "Custom" + ] + } + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export FILE_PATH=categories.json + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + "$SERVER_URL/api/files/$ID/metadata/categories" \ + -H "Content-type:application/json" --upload-file $FILE_PATH + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + "http://demo.dataverse.org/api/files/24/metadata/categories" \ + -H "Content-type:application/json" --upload-file categories.json + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=categories.json + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + "$SERVER_URL/api/files/:persistentId/metadata/categories?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + "https://demo.dataverse.org/api/files/:persistentId/metadata/categories?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file categories.json + +Note that if the specified categories do not exist, they will be created. + +Updating File Tabular Tags +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates the tabular tags for an existing tabular file where ``ID`` is the database id of the file to update or ``PERSISTENT_ID`` is the persistent id (DOI or Handle) of the file. Requires a ``jsonString`` expressing the tabular tag names. + +The JSON representation of tabular tags (``tags.json``) looks like this:: + + { + "tabularTags": [ + "Survey", + "Genomics" + ] + } + +A curl example using an ``ID`` + +.. 
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=24 + export FILE_PATH=tags.json + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + "$SERVER_URL/api/files/$ID/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file $FILE_PATH + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + "http://demo.dataverse.org/api/files/24/metadata/tabularTags" \ + -H "Content-type:application/json" --upload-file tags.json + +A curl example using a ``PERSISTENT_ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export PERSISTENT_ID=doi:10.5072/FK2/AAA000 + export FILE_PATH=tags.json + + curl -H "X-Dataverse-key:$API_TOKEN" -X POST \ + "$SERVER_URL/api/files/:persistentId/metadata/tabularTags?persistentId=$PERSISTENT_ID" \ + -H "Content-type:application/json" --upload-file $FILE_PATH + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST \ + "https://demo.dataverse.org/api/files/:persistentId/metadata/tabularTags?persistentId=doi:10.5072/FK2/AAA000" \ + -H "Content-type:application/json" --upload-file tags.json + +Note that the specified tabular tags must be valid. The supported tags are: + +* ``Survey`` +* ``Time Series`` +* ``Panel`` +* ``Event`` +* ``Genomics`` +* ``Network`` +* ``Geospatial`` + Editing Variable Level Metadata ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/sphinx-guides/source/developers/documentation.rst b/doc/sphinx-guides/source/developers/documentation.rst index 348d17e5c8f..d07b5b63f72 100755 --- a/doc/sphinx-guides/source/developers/documentation.rst +++ b/doc/sphinx-guides/source/developers/documentation.rst @@ -8,7 +8,7 @@ Writing Documentation Quick Fix ----------- -If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://help.github.com/en/articles/editing-files-in-another-users-repository +If you find a typo or a small error in the documentation you can fix it using GitHub's online web editor. Generally speaking, we will be following https://docs.github.com/en/repositories/working-with-files/managing-files/editing-files#editing-files-in-another-users-repository - Navigate to https://github.com/IQSS/dataverse/tree/develop/doc/sphinx-guides/source where you will see folders for each of the guides: `admin`_, `api`_, `developers`_, `installation`_, `style`_, `user`_. - Find the file you want to edit under one of the folders above. diff --git a/doc/sphinx-guides/source/developers/testing.rst b/doc/sphinx-guides/source/developers/testing.rst index 9135f7bf7ea..abecaa09fad 100755 --- a/doc/sphinx-guides/source/developers/testing.rst +++ b/doc/sphinx-guides/source/developers/testing.rst @@ -316,7 +316,7 @@ Please make sure to: .. code:: java /** A very minimal example for a Testcontainers integration test class. */ - @Testcontainers + @Testcontainers(disabledWithoutDocker = true) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.INTEGRATION_TEST) @Tag(edu.harvard.iq.dataverse.util.testing.Tags.USES_TESTCONTAINERS) class MyExampleIT { /* ... 
*/ } diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index a09b6f4e5d9..13a7367de44 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -1276,6 +1276,8 @@ The list below depicts a set of tools that can be used to ease the amount of wor - `easyTranslationHelper `_, a tool developed by `University of Aveiro `_. +- `Dataverse General User Interface Translation Guide for Weblate `_, a guide produced as part of the `SSHOC Dataverse Translation `_ event. + .. _Web-Analytics-Code: Web Analytics Code @@ -1771,8 +1773,8 @@ protocol, host, and port number and should not include a trailing slash. dataverse.files.directory +++++++++++++++++++++++++ -Please provide an absolute path to a directory backed by some mounted file system. This directory is used for a number -of purposes: +Providing an explicit location here makes it easier to reuse some mounted filesystem and we recommend doing so +to avoid filled-up disks, aid in performance, etc. This directory is used for a number of purposes: 1. ``/temp`` after uploading, data is temporarily stored here for ingest and/or before shipping to the final storage destination. @@ -1785,24 +1787,51 @@ of purposes: under certain conditions. This directory may also be used by file stores for :ref:`permanent file storage `, but this is controlled by other, store-specific settings. -Defaults to ``/tmp/dataverse``. Can also be set via *MicroProfile Config API* sources, e.g. the environment variable -``DATAVERSE_FILES_DIRECTORY``. Defaults to ``${STORAGE_DIR}`` for profile ``ct``, important for the -:ref:`Dataverse Application Image `. +Notes: + +- Please provide an absolute path to a directory backed by some mounted file system. +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DIRECTORY``. +- Defaults to ``/tmp/dataverse`` in a :doc:`default installation `. +- Defaults to ``${STORAGE_DIR}`` using our :ref:`Dataverse container ` (resolving to ``/dv``). +- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. .. _dataverse.files.uploads: dataverse.files.uploads +++++++++++++++++++++++ -Configure a folder to store the incoming file stream during uploads (before transfering to `${dataverse.files.directory}/temp`). +Configure a folder to store the incoming file stream during uploads (before transferring to ``${dataverse.files.directory}/temp``). +Providing an explicit location here makes it easier to reuse some mounted filesystem. Please also see :ref:`temporary-file-storage` for more details. -You can use an absolute path or a relative, which is relative to the application server domain directory. -Defaults to ``./uploads``, which resolves to ``/usr/local/payara6/glassfish/domains/domain1/uploads`` in a default -installation. +Notes: + +- Please provide an absolute path to a directory backed by some mounted file system. +- Defaults to ``${com.sun.aas.instanceRoot}/uploads`` in a :doc:`default installation ` + (resolving to ``/usr/local/payara6/glassfish/domains/domain1/uploads``). +- Defaults to ``${STORAGE_DIR}/uploads`` using our :ref:`Dataverse container ` (resolving to ``/dv/uploads``). +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``.
+- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. + +.. _dataverse.files.docroot: + +dataverse.files.docroot ++++++++++++++++++++++++ + +Configure a folder to store and retrieve additional materials like user uploaded collection logos, generated sitemaps, +and so on. Providing an explicit location here makes it easier to reuse some mounted filesystem. +See also logo customization above. + +Notes: -Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_UPLOADS``. -Defaults to ``${STORAGE_DIR}/uploads`` for profile ``ct``, important for the :ref:`Dataverse Application Image `. +- Defaults to ``${com.sun.aas.instanceRoot}/docroot`` in a :doc:`default installation ` + (resolves to ``/usr/local/payara6/glassfish/domains/domain1/docroot``). +- Defaults to ``${STORAGE_DIR}/docroot`` using our :ref:`Dataverse container ` (resolving to ``/dv/docroot``). +- Can also be set via *MicroProfile Config API* sources, e.g. the environment variable ``DATAVERSE_FILES_DOCROOT``. +- During startup, this directory will be checked for existence and write access. It will be created for you + if missing. If it cannot be created or does not have proper write access, application deployment will fail. dataverse.auth.password-reset-timeout-in-minutes ++++++++++++++++++++++++++++++++++++++++++++++++ @@ -3036,12 +3065,18 @@ You can override this global setting on a per-format basis for the following for - SAV - Rdata - CSV -- XLSX +- XLSX (in lower-case) -For example, if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting: +For example : + +* if you want your Dataverse installation to not attempt to ingest Rdata files larger than 1 MB, use this setting: ``curl -X PUT -d 1000000 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:Rdata`` +* if you want your Dataverse installation to not attempt to ingest XLSX files at all, use this setting: + +``curl -X PUT -d 0 http://localhost:8080/api/admin/settings/:TabularIngestSizeLimit:xlsx`` + :ZipUploadFilesLimit ++++++++++++++++++++ diff --git a/doc/sphinx-guides/source/user/dataset-management.rst b/doc/sphinx-guides/source/user/dataset-management.rst index 65ff7e0bc7a..ac541bdcee9 100755 --- a/doc/sphinx-guides/source/user/dataset-management.rst +++ b/doc/sphinx-guides/source/user/dataset-management.rst @@ -200,6 +200,7 @@ Previewers are available for the following file types: - Text - PDF +- Markdown - Tabular (CSV, Excel, etc., see :doc:`tabulardataingest/index`) - Code (R, etc.) - Images (PNG, GIF, JPG) diff --git a/doc/sphinx-guides/source/user/dataverse-management.rst b/doc/sphinx-guides/source/user/dataverse-management.rst index 3d301619105..0e0fbcc0883 100755 --- a/doc/sphinx-guides/source/user/dataverse-management.rst +++ b/doc/sphinx-guides/source/user/dataverse-management.rst @@ -214,7 +214,7 @@ Dataset linking allows a Dataverse collection owner to "link" their Dataverse co For example, researchers working on a collaborative study across institutions can each link their own individual institutional Dataverse collections to the one collaborative dataset, making it easier for interested parties from each institution to find the study. -In order to link a dataset, you will need your account to have the "Add Dataset" permission on the Dataverse collection that is doing the linking. 
If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked. +In order to link a dataset, you will need your account to have the "Publish Dataset" permission on the Dataverse collection that is doing the linking. If you created the Dataverse collection then you should have this permission already, but if not then you will need to ask the admin of that Dataverse collection to assign that permission to your account. You do not need any special permissions on the dataset being linked. To link a dataset to your Dataverse collection, you must navigate to that dataset and click the white "Link" button in the upper-right corner of the dataset page. This will open up a window where you can type in the name of the Dataverse collection that you would like to link the dataset to. Select your Dataverse collection and click the save button. This will establish the link, and the dataset will now appear under your Dataverse collection. diff --git a/docker-compose-dev.yml b/docker-compose-dev.yml index 694f2046ca8..3bccec2734a 100644 --- a/docker-compose-dev.yml +++ b/docker-compose-dev.yml @@ -18,6 +18,7 @@ services: - DATAVERSE_AUTH_OIDC_CLIENT_ID=test - DATAVERSE_AUTH_OIDC_CLIENT_SECRET=94XHrfNRwXsjqTqApRrwWmhDLDHpIYV8 - DATAVERSE_AUTH_OIDC_AUTH_SERVER_URL=http://keycloak.mydomain.com:8090/realms/test + - DATAVERSE_JSF_REFRESH_PERIOD=1 ports: - "8080:8080" # HTTP (Dataverse Application) - "4848:4848" # HTTP (Payara Admin Console) @@ -28,9 +29,13 @@ services: depends_on: - dev_postgres - dev_solr + - dev_dv_initializer volumes: - ./docker-dev-volumes/app/data:/dv - ./docker-dev-volumes/app/secrets:/secrets + # Uncomment for changes to xhtml to be deployed immediately (if supported your IDE or toolchain). + # Replace 6.0 with the current version. + # - ./target/dataverse-6.0:/opt/payara/deployments/dataverse tmpfs: - /dumps:mode=770,size=2052M,uid=1000,gid=1000 - /tmp:mode=770,size=2052M,uid=1000,gid=1000 @@ -48,6 +53,17 @@ services: networks: - dataverse + dev_dv_initializer: + container_name: "dev_dv_initializer" + image: gdcc/configbaker:unstable + restart: "no" + command: + - sh + - -c + - "fix-fs-perms.sh dv" + volumes: + - ./docker-dev-volumes/app/data:/dv + dev_postgres: container_name: "dev_postgres" hostname: postgres diff --git a/modules/container-configbaker/Dockerfile b/modules/container-configbaker/Dockerfile index 564216b3572..9b98334d72b 100644 --- a/modules/container-configbaker/Dockerfile +++ b/modules/container-configbaker/Dockerfile @@ -26,8 +26,12 @@ RUN true && \ # Make our working directories mkdir -p ${SCRIPT_DIR} ${SECRETS_DIR} ${SOLR_TEMPLATE} -# Get in the scripts and make them executable (just in case...) +# Get in the scripts COPY maven/scripts maven/solr/update-fields.sh ${SCRIPT_DIR}/ +# Copy the data from scripts/api that provide the common base setup you'd get from the installer. 
+# ".dockerignore" will take care of taking only the bare necessities +COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ +# Make the scripts executable RUN chmod +x ${SCRIPT_DIR}/*.sh ${BOOTSTRAP_DIR}/*/*.sh # Copy the Solr config bits @@ -35,9 +39,6 @@ COPY --from=solr /opt/solr/server/solr/configsets/_default ${SOLR_TEMPLATE}/ COPY maven/solr/*.xml ${SOLR_TEMPLATE}/conf/ RUN rm ${SOLR_TEMPLATE}/conf/managed-schema.xml -# Copy the data from scripts/api that provide the common base setup you'd get from the installer. -# ".dockerignore" will take care of taking only the bare necessities -COPY maven/setup ${SCRIPT_DIR}/bootstrap/base/ # Set the entrypoint to tini (as a process supervisor) ENTRYPOINT ["/usr/bin/dumb-init", "--"] diff --git a/modules/dataverse-parent/pom.xml b/modules/dataverse-parent/pom.xml index bfa11af6c70..db0fa46a952 100644 --- a/modules/dataverse-parent/pom.xml +++ b/modules/dataverse-parent/pom.xml @@ -165,7 +165,7 @@ 4.4.14 - 5.1.0 + 5.2.0 1.19.0 @@ -426,6 +426,7 @@ https://artifacts.unidata.ucar.edu/repository/unidata-all/ + + --> diff --git a/pom.xml b/pom.xml index e70b723cad5..5536bcccb05 100644 --- a/pom.xml +++ b/pom.xml @@ -359,12 +359,12 @@ org.ocpsoft.rewrite rewrite-servlet - 6.0.0-SNAPSHOT + 10.0.0.Final org.ocpsoft.rewrite rewrite-config-prettyfaces - 6.0.0-SNAPSHOT + 10.0.0.Final edu.ucsb.nceas diff --git a/src/main/docker/Dockerfile b/src/main/docker/Dockerfile index aa39078fb06..ed670294873 100644 --- a/src/main/docker/Dockerfile +++ b/src/main/docker/Dockerfile @@ -29,6 +29,11 @@ FROM $BASE_IMAGE # See also https://download.eclipse.org/microprofile/microprofile-config-3.0/microprofile-config-spec-3.0.html#configprofile ENV MP_CONFIG_PROFILE=ct +# Workaround to configure upload directories by default to useful place until we can have variable lookups in +# defaults for glassfish-web.xml and other places. 
+ENV DATAVERSE_FILES_UPLOADS="${STORAGE_DIR}/uploads" +ENV DATAVERSE_FILES_DOCROOT="${STORAGE_DIR}/docroot" + # Copy app and deps from assembly in proper layers COPY --chown=payara:payara maven/deps ${DEPLOY_DIR}/dataverse/WEB-INF/lib/ COPY --chown=payara:payara maven/app ${DEPLOY_DIR}/dataverse/ diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index ba13729793e..407282a5372 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -386,7 +386,17 @@ public JsonArrayBuilder getTagLabelsAsJsonArrayBuilder(){ public void setTags(List dataFileTags) { this.dataFileTags = dataFileTags; } - + + public void addUniqueTagByLabel(String tagLabel) throws IllegalArgumentException { + if (tagExists(tagLabel)) { + return; + } + DataFileTag tag = new DataFileTag(); + tag.setTypeByLabel(tagLabel); + tag.setDataFile(this); + addTag(tag); + } + public void addTag(DataFileTag tag) { if (dataFileTags == null) { dataFileTags = new ArrayList<>(); @@ -1093,8 +1103,12 @@ public String getTargetUrl() { return DataFile.TARGET_URL; } + private boolean tagExists(String tagLabel) { + for (DataFileTag dataFileTag : dataFileTags) { + if (dataFileTag.getTypeLabel().equals(tagLabel)) { + return true; + } + } + return false; + } } // end of class - - - - diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java index f4f66d3c874..351c4032939 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFileTag.java @@ -58,7 +58,7 @@ public enum TagType {Survey, TimeSeries, Panel, Event, Genomics, Network, Geospa private static final Map TagTypeToLabels = new HashMap<>(); - private static final Map TagLabelToTypes = new HashMap<>(); + public static final Map TagLabelToTypes = new HashMap<>(); static { diff --git a/src/main/java/edu/harvard/iq/dataverse/Dataset.java b/src/main/java/edu/harvard/iq/dataverse/Dataset.java index 21014d7f99c..245bdf0efd2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/Dataset.java +++ b/src/main/java/edu/harvard/iq/dataverse/Dataset.java @@ -158,6 +158,23 @@ public void setCitationDateDatasetFieldType(DatasetFieldType citationDateDataset this.citationDateDatasetFieldType = citationDateDatasetFieldType; } + // Per DataCite best practices, the citation date of a dataset may need + // to be adjusted to reflect the latest embargo availability date of any + // file within the first published version. + // If any files are embargoed in the first version, this date will be + // calculated and cached here upon its publication, in the + // FinalizeDatasetPublicationCommand. 
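+    // Note (clarifying comment, based on how the field is used below): if no
+    // files in the first published version are embargoed, this field simply
+    // remains null, and getCitationDate() falls back to the regular
+    // publication date.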
+ private Timestamp embargoCitationDate; + + public Timestamp getEmbargoCitationDate() { + return embargoCitationDate; + } + + public void setEmbargoCitationDate(Timestamp embargoCitationDate) { + this.embargoCitationDate = embargoCitationDate; + } + + @ManyToOne @JoinColumn(name="template_id",nullable = true) @@ -676,20 +693,10 @@ public Timestamp getCitationDate() { Timestamp citationDate = null; //Only calculate if this dataset doesn't use an alternate date field for publication date if (citationDateDatasetFieldType == null) { - List versions = this.versions; - // TODo - is this ever not version 1.0 (or draft if not published yet) - DatasetVersion oldest = versions.get(versions.size() - 1); citationDate = super.getPublicationDate(); - if (oldest.isPublished()) { - List fms = oldest.getFileMetadatas(); - for (FileMetadata fm : fms) { - Embargo embargo = fm.getDataFile().getEmbargo(); - if (embargo != null) { - Timestamp embDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay()); - if (citationDate.compareTo(embDate) < 0) { - citationDate = embDate; - } - } + if (embargoCitationDate != null) { + if (citationDate.compareTo(embargoCitationDate) < 0) { + return embargoCitationDate; } } } diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java index 7746099818e..8ac98500890 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValue.java @@ -113,7 +113,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetField)) { + if (!(object instanceof DatasetFieldDefaultValue)) { return false; } DatasetFieldDefaultValue other = (DatasetFieldDefaultValue) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java index 620d4bf3e09..ce2b00086ec 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetFieldServiceBean.java @@ -500,7 +500,8 @@ public void process(HttpResponse response, HttpContext context) throws HttpExcep .setRetryHandler(new DefaultHttpRequestRetryHandler(3, false)) .build()) { HttpGet httpGet = new HttpGet(retrievalUri); - httpGet.addHeader("Accept", "application/json+ld, application/json"); + //application/json+ld is for backward compatibility + httpGet.addHeader("Accept", "application/ld+json, application/json+ld, application/json"); HttpResponse response = httpClient.execute(httpGet); String data = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index b2d536a0dda..fc18257196d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -2880,6 +2880,12 @@ public void sort() { public String refresh() { logger.fine("refreshing"); + //In v5.14, versionId was null here. In 6.0, it appears not to be. 
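+ // (init() normally sets versionId before refresh() runs; see the comment
+ // on the null check further down.)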
+ //This check is to handle the null if it reappears/occurs under other circumstances + if(versionId==null) { + logger.warning("versionId was null in refresh"); + versionId = workingVersion.getId(); + } //dataset = datasetService.find(dataset.getId()); dataset = null; workingVersion = null; @@ -2889,10 +2895,9 @@ public String refresh() { DatasetVersionServiceBean.RetrieveDatasetVersionResponse retrieveDatasetVersionResponse = null; if (versionId != null) { - // versionId must have been set by now, in the init() method, - // regardless of how the page was originally called - by the dataset - // database id, by the persistent identifier, or by the db id of - // the version. + // versionId must have been set by now (see null check above), in the init() + // method, regardless of how the page was originally called - by the dataset + // database id, by the persistent identifier, or by the db id of the version. this.workingVersion = datasetVersionService.findDeep(versionId); dataset = workingVersion.getDataset(); } @@ -3391,7 +3396,7 @@ public void saveLinkingDataverses(ActionEvent evt) { FacesMessage message = new FacesMessage(FacesMessage.SEVERITY_INFO, BundleUtil.getStringFromBundle("dataset.notlinked"), linkingDataverseErrorMessage); FacesContext.getCurrentInstance().addMessage(null, message); } - + alreadyLinkedDataverses = null; //force update to list of linked dataverses } private String linkingDataverseErrorMessage = ""; @@ -3427,7 +3432,23 @@ private Boolean saveLink(Dataverse dataverse){ } return retVal; } - + + private String alreadyLinkedDataverses = null; + + public String getAlreadyLinkedDataverses(){ + if (alreadyLinkedDataverses != null) { + return alreadyLinkedDataverses; + } + List dataverseList = dataverseService.findDataversesThatLinkToThisDatasetId(dataset.getId()); + for (Dataverse dv: dataverseList){ + if (alreadyLinkedDataverses == null){ + alreadyLinkedDataverses = dv.getCurrentName(); + } else { + alreadyLinkedDataverses = alreadyLinkedDataverses + ", " + dv.getCurrentName(); + } + } + return alreadyLinkedDataverses; + } public List completeLinkingDataverse(String query) { dataset = datasetService.find(dataset.getId()); @@ -6266,7 +6287,10 @@ public String getWebloaderUrlForDataset(Dataset d) { String signpostingLinkHeader = null; public String getSignpostingLinkHeader() { - if (!workingVersion.isReleased()) { + if ((workingVersion==null) || (!workingVersion.isReleased())) { + if(workingVersion==null) { + logger.warning("workingVersion was null in getSignpostingLinkHeader"); + } return null; } if (signpostingLinkHeader == null) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java index bc79b4a7107..c6df2a2e1ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java @@ -137,7 +137,7 @@ public Dataset findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments") .getSingleResult(); } - + public List findByOwnerId(Long ownerId) { return findByOwnerId(ownerId, false); } @@ -788,13 +788,13 @@ public void exportDataset(Dataset dataset, boolean forceReExport) { } } } - + } //get a string to add to save success message //depends on page (dataset/file) and user privleges public String getReminderString(Dataset dataset, boolean canPublishDataset, boolean filePage, boolean isValid) { - + String reminderString; if (canPublishDataset) { diff --git 
a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java index 93f45bd288e..5fd963f3931 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java @@ -68,7 +68,13 @@ query = "SELECT OBJECT(o) FROM DatasetVersion AS o WHERE o.dataset.harvestedFrom IS NULL and o.releaseTime IS NOT NULL and o.archivalCopyLocation IS NULL" ), @NamedQuery(name = "DatasetVersion.findById", - query = "SELECT o FROM DatasetVersion o LEFT JOIN FETCH o.fileMetadatas WHERE o.id=:id")}) + query = "SELECT o FROM DatasetVersion o LEFT JOIN FETCH o.fileMetadatas WHERE o.id=:id"), + @NamedQuery(name = "DatasetVersion.findByDataset", + query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC"), + @NamedQuery(name = "DatasetVersion.findReleasedByDataset", + query = "SELECT o FROM DatasetVersion o WHERE o.dataset.id=:datasetId AND o.versionState=edu.harvard.iq.dataverse.DatasetVersion.VersionState.RELEASED ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")/*, + @NamedQuery(name = "DatasetVersion.findVersionElements", + query = "SELECT o.id, o.versionState, o.versionNumber, o.minorVersionNumber FROM DatasetVersion o WHERE o.dataset.id=:datasetId ORDER BY o.versionNumber DESC, o.minorVersionNumber DESC")*/}) @Entity diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java new file mode 100644 index 00000000000..78fd896c897 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionFilesServiceBean.java @@ -0,0 +1,321 @@ +package edu.harvard.iq.dataverse; + +import edu.harvard.iq.dataverse.FileSearchCriteria.FileAccessStatus; +import jakarta.ejb.Stateless; +import jakarta.inject.Named; +import jakarta.persistence.EntityManager; +import jakarta.persistence.PersistenceContext; +import jakarta.persistence.Tuple; +import jakarta.persistence.TypedQuery; +import jakarta.persistence.criteria.*; + +import java.io.Serializable; +import java.sql.Timestamp; +import java.util.*; + +import static edu.harvard.iq.dataverse.DataFileTag.TagLabelToTypes; + +@Stateless +@Named +public class DatasetVersionFilesServiceBean implements Serializable { + + @PersistenceContext(unitName = "VDCNet-ejbPU") + private EntityManager em; + + /** + * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionFilesServiceBean#getFileMetadatas} + */ + public enum FileOrderCriteria { + NameAZ, NameZA, Newest, Oldest, Size, Type + } + + /** + * Mode to base the search in {@link DatasetVersionFilesServiceBean#getFilesDownloadSize(DatasetVersion, FileSearchCriteria, FileDownloadSizeMode)} + *
+ * All: Includes both archival and original sizes for tabular files
+ * Archival: Includes only the archival size for tabular files
+ * Original: Includes only the original size for tabular files
+ *
+ * All the modes include archival sizes for non-tabular files + */ + public enum FileDownloadSizeMode { + All, Original, Archival + } + + /** + * Given a DatasetVersion, returns its total file metadata count + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return long value of total file metadata count + */ + public long getFileMetadataCount(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(criteriaBuilder.count(fileMetadataRoot)) + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)); + return em.createQuery(criteriaQuery).getSingleResult(); + } + + /** + * Given a DatasetVersion, returns its file metadata count per content type + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per content type + */ + public Map getFileMetadataCountPerContentType(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Path contentType = fileMetadataRoot.get("dataFile").get("contentType"); + criteriaQuery + .multiselect(contentType, criteriaBuilder.count(contentType)) + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)) + .groupBy(contentType); + return getStringLongMapResultFromQuery(criteriaQuery); + } + + /** + * Given a DatasetVersion, returns its file metadata count per category name + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per category name + */ + public Map getFileMetadataCountPerCategoryName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Root dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class); + Path categoryName = dataFileCategoryRoot.get("name"); + criteriaQuery + .multiselect(categoryName, criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), + dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories")))) + .groupBy(categoryName); + return getStringLongMapResultFromQuery(criteriaQuery); + } + + /** + * Given a DatasetVersion, returns its file metadata count per DataFileTag.TagType + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per DataFileTag.TagType + */ + public Map getFileMetadataCountPerTabularTagName(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createTupleQuery(); 
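+ // Tuple query: selects (tag type, occurrence count) pairs grouped by tag
+ // type, so all counts come back in a single database round trip.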
+ Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Root dataFileTagRoot = criteriaQuery.from(DataFileTag.class); + Path dataFileTagType = dataFileTagRoot.get("type"); + criteriaQuery + .multiselect(dataFileTagType, criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot), + dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags")))) + .groupBy(dataFileTagType); + List tagNameOccurrences = em.createQuery(criteriaQuery).getResultList(); + Map result = new HashMap<>(); + for (Tuple occurrence : tagNameOccurrences) { + result.put(occurrence.get(0, DataFileTag.TagType.class), occurrence.get(1, Long.class)); + } + return result; + } + + /** + * Given a DatasetVersion, returns its file metadata count per FileAccessStatus + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for counting only files matching this criteria + * @return Map of file metadata counts per FileAccessStatus + */ + public Map getFileMetadataCountPerAccessStatus(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + Map allCounts = new HashMap<>(); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Public, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.Restricted, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenPublic, searchCriteria); + addAccessStatusCountToTotal(datasetVersion, allCounts, FileAccessStatus.EmbargoedThenRestricted, searchCriteria); + return allCounts; + } + + /** + * Returns a FileMetadata list of files in the specified DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param limit for pagination, can be null + * @param offset for pagination, can be null + * @param searchCriteria for retrieving only files matching this criteria + * @param orderCriteria a FileOrderCriteria to order the results + * @return a FileMetadata list from the specified DatasetVersion + */ + public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileSearchCriteria searchCriteria, FileOrderCriteria orderCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(FileMetadata.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(fileMetadataRoot) + .where(createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot)) + .orderBy(createGetFileMetadatasOrder(criteriaBuilder, orderCriteria, fileMetadataRoot)); + TypedQuery typedQuery = em.createQuery(criteriaQuery); + if (limit != null) { + typedQuery.setMaxResults(limit); + } + if (offset != null) { + typedQuery.setFirstResult(offset); + } + return typedQuery.getResultList(); + } + + /** + * Returns the total download size of all files for a particular DatasetVersion + * + * @param datasetVersion the DatasetVersion to access + * @param searchCriteria for retrieving only files matching this criteria + * @param mode a FileDownloadSizeMode to base the search on + * @return long value of total file download size + */ + public long getFilesDownloadSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria, FileDownloadSizeMode mode) { + return switch (mode) { + case All -> + Long.sum(getOriginalTabularFilesSize(datasetVersion, 
searchCriteria), getArchivalFilesSize(datasetVersion, false, searchCriteria)); + case Original -> + Long.sum(getOriginalTabularFilesSize(datasetVersion, searchCriteria), getArchivalFilesSize(datasetVersion, true, searchCriteria)); + case Archival -> getArchivalFilesSize(datasetVersion, false, searchCriteria); + }; + } + + private void addAccessStatusCountToTotal(DatasetVersion datasetVersion, Map totalCounts, FileAccessStatus dataFileAccessStatus, FileSearchCriteria searchCriteria) { + long fileMetadataCount = getFileMetadataCountByAccessStatus(datasetVersion, dataFileAccessStatus, searchCriteria); + if (fileMetadataCount > 0) { + totalCounts.put(dataFileAccessStatus, fileMetadataCount); + } + } + + private long getFileMetadataCountByAccessStatus(DatasetVersion datasetVersion, FileAccessStatus accessStatus, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + criteriaQuery + .select(criteriaBuilder.count(fileMetadataRoot)) + .where(criteriaBuilder.and( + createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot), + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); + return em.createQuery(criteriaQuery).getSingleResult(); + } + + private Predicate createSearchCriteriaAccessStatusPredicate(FileAccessStatus accessStatus, CriteriaBuilder criteriaBuilder, Root fileMetadataRoot) { + Path dataFile = fileMetadataRoot.get("dataFile"); + Path embargo = dataFile.get("embargo"); + Predicate activelyEmbargoedPredicate = criteriaBuilder.greaterThanOrEqualTo(embargo.get("dateAvailable"), criteriaBuilder.currentDate()); + Predicate inactivelyEmbargoedPredicate = criteriaBuilder.isNull(embargo); + Path isRestricted = dataFile.get("restricted"); + Predicate isRestrictedPredicate = criteriaBuilder.isTrue(isRestricted); + Predicate isUnrestrictedPredicate = criteriaBuilder.isFalse(isRestricted); + return switch (accessStatus) { + case EmbargoedThenRestricted -> criteriaBuilder.and(activelyEmbargoedPredicate, isRestrictedPredicate); + case EmbargoedThenPublic -> criteriaBuilder.and(activelyEmbargoedPredicate, isUnrestrictedPredicate); + case Restricted -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isRestrictedPredicate); + case Public -> criteriaBuilder.and(inactivelyEmbargoedPredicate, isUnrestrictedPredicate); + }; + } + + private Predicate createSearchCriteriaPredicate(DatasetVersion datasetVersion, + FileSearchCriteria searchCriteria, + CriteriaBuilder criteriaBuilder, + CriteriaQuery criteriaQuery, + Root fileMetadataRoot) { + List predicates = new ArrayList<>(); + Predicate basePredicate = criteriaBuilder.equal(fileMetadataRoot.get("datasetVersion").get("id"), datasetVersion.getId()); + predicates.add(basePredicate); + String contentType = searchCriteria.getContentType(); + if (contentType != null) { + predicates.add(criteriaBuilder.equal(fileMetadataRoot.get("dataFile").get("contentType"), contentType)); + } + FileAccessStatus accessStatus = searchCriteria.getAccessStatus(); + if (accessStatus != null) { + predicates.add(createSearchCriteriaAccessStatusPredicate(accessStatus, criteriaBuilder, fileMetadataRoot)); + } + String categoryName = searchCriteria.getCategoryName(); + if (categoryName != null) { + Root dataFileCategoryRoot = criteriaQuery.from(DataFileCategory.class); + 
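// Join FileMetadata to DataFileCategory through its fileCategories collection, keeping only categories whose name matches. +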
predicates.add(criteriaBuilder.equal(dataFileCategoryRoot.get("name"), categoryName)); + predicates.add(dataFileCategoryRoot.in(fileMetadataRoot.get("fileCategories"))); + } + String tabularTagName = searchCriteria.getTabularTagName(); + if (tabularTagName != null) { + Root dataFileTagRoot = criteriaQuery.from(DataFileTag.class); + predicates.add(criteriaBuilder.equal(dataFileTagRoot.get("type"), TagLabelToTypes.get(tabularTagName))); + predicates.add(dataFileTagRoot.in(fileMetadataRoot.get("dataFile").get("dataFileTags"))); + } + String searchText = searchCriteria.getSearchText(); + if (searchText != null && !searchText.isEmpty()) { + searchText = searchText.trim().toLowerCase(); + predicates.add(criteriaBuilder.like(fileMetadataRoot.get("label"), "%" + searchText + "%")); + } + return criteriaBuilder.and(predicates.toArray(new Predicate[]{})); + } + + private Order createGetFileMetadatasOrder(CriteriaBuilder criteriaBuilder, + FileOrderCriteria orderCriteria, + Root fileMetadataRoot) { + Path label = fileMetadataRoot.get("label"); + Path dataFile = fileMetadataRoot.get("dataFile"); + Path publicationDate = dataFile.get("publicationDate"); + Path createDate = dataFile.get("createDate"); + Expression orderByLifetimeExpression = criteriaBuilder.selectCase().when(publicationDate.isNotNull(), publicationDate).otherwise(createDate); + return switch (orderCriteria) { + case NameZA -> criteriaBuilder.desc(label); + case Newest -> criteriaBuilder.desc(orderByLifetimeExpression); + case Oldest -> criteriaBuilder.asc(orderByLifetimeExpression); + case Size -> criteriaBuilder.asc(dataFile.get("filesize")); + case Type -> criteriaBuilder.asc(dataFile.get("contentType")); + default -> criteriaBuilder.asc(label); + }; + } + + private long getOriginalTabularFilesSize(DatasetVersion datasetVersion, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Root dataTableRoot = criteriaQuery.from(DataTable.class); + criteriaQuery + .select(criteriaBuilder.sum(dataTableRoot.get("originalFileSize"))) + .where(criteriaBuilder.and( + criteriaBuilder.equal(dataTableRoot.get("dataFile"), fileMetadataRoot.get("dataFile")), + createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot))); + Long result = em.createQuery(criteriaQuery).getSingleResult(); + return (result == null) ? 0 : result; + } + + private long getArchivalFilesSize(DatasetVersion datasetVersion, boolean ignoreTabular, FileSearchCriteria searchCriteria) { + CriteriaBuilder criteriaBuilder = em.getCriteriaBuilder(); + CriteriaQuery criteriaQuery = criteriaBuilder.createQuery(Long.class); + Root fileMetadataRoot = criteriaQuery.from(FileMetadata.class); + Predicate searchCriteriaPredicate = createSearchCriteriaPredicate(datasetVersion, searchCriteria, criteriaBuilder, criteriaQuery, fileMetadataRoot); + Predicate wherePredicate; + if (ignoreTabular) { + wherePredicate = criteriaBuilder.and(searchCriteriaPredicate, criteriaBuilder.isEmpty(fileMetadataRoot.get("dataFile").get("dataTables"))); + } else { + wherePredicate = searchCriteriaPredicate; + } + criteriaQuery + .select(criteriaBuilder.sum(fileMetadataRoot.get("dataFile").get("filesize"))) + .where(wherePredicate); + Long result = em.createQuery(criteriaQuery).getSingleResult(); + return (result == null) ? 
0 : result; + } + + private Map getStringLongMapResultFromQuery(CriteriaQuery criteriaQuery) { + List categoryNameOccurrences = em.createQuery(criteriaQuery).getResultList(); + Map result = new HashMap<>(); + for (Tuple occurrence : categoryNameOccurrences) { + result.put(occurrence.get(0, String.class), occurrence.get(1, Long.class)); + } + return result; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java index 6f087f9eabc..c2f9027a38a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetVersionServiceBean.java @@ -49,22 +49,6 @@ public class DatasetVersionServiceBean implements java.io.Serializable { private static final SimpleDateFormat logFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH-mm-ss"); - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL = "SELECT fm FROM FileMetadata fm" - + " WHERE fm.datasetVersion.id=:datasetVersionId" - + " ORDER BY fm.label"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE = "SELECT fm FROM FileMetadata fm, DvObject dvo" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = dvo.id" - + " ORDER BY CASE WHEN dvo.publicationDate IS NOT NULL THEN dvo.publicationDate ELSE dvo.createDate END"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.filesize"; - private static final String QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE = "SELECT fm FROM FileMetadata fm, DataFile df" - + " WHERE fm.datasetVersion.id = :datasetVersionId" - + " AND fm.dataFile.id = df.id" - + " ORDER BY df.contentType"; - @EJB DatasetServiceBean datasetService; @@ -166,18 +150,6 @@ public DatasetVersion getDatasetVersion(){ } } // end RetrieveDatasetVersionResponse - /** - * Different criteria to sort the results of FileMetadata queries used in {@link DatasetVersionServiceBean#getFileMetadatas} - */ - public enum FileMetadatasOrderCriteria { - NameAZ, - NameZA, - Newest, - Oldest, - Size, - Type - } - public DatasetVersion find(Object pk) { return em.find(DatasetVersion.class, pk); } @@ -194,9 +166,44 @@ public DatasetVersion findDeep(Object pk) { .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.datasetVersion") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.releaseUser") .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.creator") + .setHint("eclipselink.left-join-fetch", "o.fileMetadatas.dataFile.dataFileTags") .getSingleResult(); } - + + /** + * Performs the same database lookup as the one behind Dataset.getVersions(). + * Additionally, provides the arguments for selecting a partial list of + * (length-offset) versions for pagination, plus the ability to pre-select + * only the publicly-viewable versions. + * It is recommended that individual software components utilize the + * ListVersionsCommand, instead of calling this service method directly. + * @param datasetId + * @param offset for pagination through long lists of versions + * @param length for pagination through long lists of versions + * @param includeUnpublished retrieves all the versions, including drafts and deaccessioned. 
+ * @return (partial) list of versions + */ + public List findVersions(Long datasetId, Integer offset, Integer length, boolean includeUnpublished) { + TypedQuery query; + if (includeUnpublished) { + query = em.createNamedQuery("DatasetVersion.findByDataset", DatasetVersion.class); + } else { + query = em.createNamedQuery("DatasetVersion.findReleasedByDataset", DatasetVersion.class) + .setParameter("datasetId", datasetId); + } + + query.setParameter("datasetId", datasetId); + + if (offset != null) { + query.setFirstResult(offset); + } + if (length != null) { + query.setMaxResults(length); + } + + return query.getResultList(); + } + public DatasetVersion findByFriendlyVersionNumber(Long datasetId, String friendlyVersionNumber) { Long majorVersionNumber = null; Long minorVersionNumber = null; @@ -1252,50 +1259,4 @@ public List getUnarchivedDatasetVersions(){ return null; } } // end getUnarchivedDatasetVersions - - /** - * Returns a FileMetadata list of files in the specified DatasetVersion - * - * @param datasetVersion the DatasetVersion to access - * @param limit for pagination, can be null - * @param offset for pagination, can be null - * @param orderCriteria a FileMetadatasOrderCriteria to order the results - * @return a FileMetadata list of the specified DatasetVersion - */ - public List getFileMetadatas(DatasetVersion datasetVersion, Integer limit, Integer offset, FileMetadatasOrderCriteria orderCriteria) { - TypedQuery query = em.createQuery(getQueryStringFromFileMetadatasOrderCriteria(orderCriteria), FileMetadata.class) - .setParameter("datasetVersionId", datasetVersion.getId()); - if (limit != null) { - query.setMaxResults(limit); - } - if (offset != null) { - query.setFirstResult(offset); - } - return query.getResultList(); - } - - private String getQueryStringFromFileMetadatasOrderCriteria(FileMetadatasOrderCriteria orderCriteria) { - String queryString; - switch (orderCriteria) { - case NameZA: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL + " DESC"; - break; - case Newest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE + " DESC"; - break; - case Oldest: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_DATE; - break; - case Size: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_SIZE; - break; - case Type: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_TYPE; - break; - default: - queryString = QUERY_STR_FIND_ALL_FILE_METADATAS_ORDER_BY_LABEL; - break; - } - return queryString; - } } // end class diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java index d77767985eb..9f86a03639a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseContact.java @@ -99,7 +99,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseContact)) { return false; } DataverseContact other = (DataverseContact) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java index 768c2308e50..83a2d8fdb8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFacet.java @@ -93,7 +93,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof 
DataverseFacet)) { return false; } DataverseFacet other = (DataverseFacet) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java index 39ad6ca9520..d30d94cd034 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverse.java @@ -85,7 +85,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseFeaturedDataverse)) { return false; } DataverseFeaturedDataverse other = (DataverseFeaturedDataverse) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java index 7194a1ef31e..549b8310122 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseServiceBean.java @@ -399,16 +399,7 @@ private File getLogo(Dataverse dataverse) { DataverseTheme theme = dataverse.getDataverseTheme(); if (theme != null && theme.getLogo() != null && !theme.getLogo().isEmpty()) { - Properties p = System.getProperties(); - String domainRoot = p.getProperty("com.sun.aas.instanceRoot"); - - if (domainRoot != null && !"".equals(domainRoot)) { - return new File (domainRoot + File.separator + - "docroot" + File.separator + - "logos" + File.separator + - dataverse.getLogoOwnerId() + File.separator + - theme.getLogo()); - } + return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile(); } return null; diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java index 539669328a7..7f57d16b95a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseTheme.java @@ -181,7 +181,7 @@ public int hashCode() { @Override public boolean equals(Object object) { - if (!(object instanceof DatasetFieldType)) { + if (!(object instanceof DataverseTheme)) { return false; } DataverseTheme other = (DataverseTheme) object; diff --git a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java index de947ee9058..55817d4a746 100644 --- a/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/FileDownloadServiceBean.java @@ -645,15 +645,4 @@ public String getDirectStorageLocatrion(String storageLocation) { return null; } - - /** - * Checks if the DataverseRequest, which contains IP Groups, has permission to download the file - * - * @param dataverseRequest the DataverseRequest - * @param dataFile the DataFile to check permissions - * @return boolean - */ - public boolean canDownloadFile(DataverseRequest dataverseRequest, DataFile dataFile) { - return permissionService.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java new file mode 100644 index 00000000000..62f10c18bdf --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/FileSearchCriteria.java @@ -0,0 +1,45 @@ +package edu.harvard.iq.dataverse; + +public class FileSearchCriteria { + + private final String contentType; + private final FileAccessStatus 
accessStatus; + private final String categoryName; + private final String tabularTagName; + private final String searchText; + + /** + * Status of the particular DataFile based on active embargoes and restriction state + */ + public enum FileAccessStatus { + Public, Restricted, EmbargoedThenRestricted, EmbargoedThenPublic + } + + public FileSearchCriteria(String contentType, FileAccessStatus accessStatus, String categoryName, String tabularTagName, String searchText) { + this.contentType = contentType; + this.accessStatus = accessStatus; + this.categoryName = categoryName; + this.tabularTagName = tabularTagName; + this.searchText = searchText; + } + + public String getContentType() { + return contentType; + } + + public FileAccessStatus getAccessStatus() { + return accessStatus; + } + + public String getCategoryName() { + return categoryName; + } + + public String getTabularTagName() { + return tabularTagName; + } + + public String getSearchText() { + return searchText; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java index 9a62a99722a..f30051e26ae 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java +++ b/src/main/java/edu/harvard/iq/dataverse/ThemeWidgetFragment.java @@ -7,6 +7,7 @@ import edu.harvard.iq.dataverse.engine.command.Command; import edu.harvard.iq.dataverse.engine.command.impl.UpdateDataverseThemeCommand; +import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.JsfHelper; import java.io.File; @@ -14,6 +15,7 @@ import java.net.MalformedURLException; import java.net.URL; import java.nio.file.Files; +import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.util.logging.Level; @@ -49,6 +51,8 @@ public class ThemeWidgetFragment implements java.io.Serializable { static final String DEFAULT_TEXT_COLOR = "888888"; private static final Logger logger = Logger.getLogger(ThemeWidgetFragment.class.getCanonicalName()); + public static final String LOGOS_SUBDIR = "logos"; + public static final String LOGOS_TEMP_SUBDIR = LOGOS_SUBDIR + File.separator + "temp"; private File tempDir; private File uploadedFile; @@ -86,12 +90,18 @@ public void setTaglineInput(HtmlInputText taglineInput) { } - + public static Path getLogoDir(String ownerId) { + return Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_SUBDIR, ownerId); + } - private void createTempDir() { + private void createTempDir() { try { - File tempRoot = Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile(); - tempDir = Files.createTempDirectory(tempRoot.toPath(),editDv.getId().toString()).toFile(); + // Create the temporary space if not yet existing (will silently ignore preexisting) + // Note that the docroot directory is checked within ConfigCheckService for presence and write access. 
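+ // The temp dir created below gets the editing dataverse's id as its name
+ // prefix, e.g. <docroot>/logos/temp/42XXXXXX for dataverse 42 (illustrative
+ // path; the random suffix comes from Files.createTempDirectory).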
+ Path tempRoot = Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup(), LOGOS_TEMP_SUBDIR); + Files.createDirectories(tempRoot); + + this.tempDir = Files.createTempDirectory(tempRoot, editDv.getId().toString()).toFile(); } catch (IOException e) { throw new RuntimeException("Error creating temp directory", e); // improve error handling } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index 1aa3f4ffde6..696fcb34920 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -31,6 +31,7 @@ import edu.harvard.iq.dataverse.RoleAssignment; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.UserNotificationServiceBean; +import edu.harvard.iq.dataverse.ThemeWidgetFragment; import static edu.harvard.iq.dataverse.api.Datasets.handleVersion; @@ -1196,16 +1197,7 @@ private File getLogo(Dataverse dataverse) { DataverseTheme theme = dataverse.getDataverseTheme(); if (theme != null && theme.getLogo() != null && !theme.getLogo().equals("")) { - Properties p = System.getProperties(); - String domainRoot = p.getProperty("com.sun.aas.instanceRoot"); - - if (domainRoot != null && !"".equals(domainRoot)) { - return new File (domainRoot + File.separator + - "docroot" + File.separator + - "logos" + File.separator + - dataverse.getLogoOwnerId() + File.separator + - theme.getLogo()); - } + return ThemeWidgetFragment.getLogoDir(dataverse.getLogoOwnerId()).resolve(theme.getLogo()).toFile(); } return null; @@ -1681,7 +1673,48 @@ public Response rejectFileAccess(@Context ContainerRequestContext crc, @PathPara return error(BAD_REQUEST, BundleUtil.getStringFromBundle("access.api.fileAccess.rejectFailure.noRequest", args)); } } - + + @GET + @AuthRequired + @Path("/datafile/{id}/userFileAccessRequested") + public Response getUserFileAccessRequested(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + AuthenticatedUser requestAuthenticatedUser; + try { + dataFile = findDataFileOrDie(dataFileId); + requestAuthenticatedUser = getRequestAuthenticatedUserOrDie(crc); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + boolean fileAccessRequested = false; + List requests = dataFile.getFileAccessRequests(); + for (FileAccessRequest fileAccessRequest : requests) { + if (fileAccessRequest.getRequester().getId().equals(requestAuthenticatedUser.getId())) { + fileAccessRequested = true; + break; + } + } + return ok(fileAccessRequested); + } + + @GET + @AuthRequired + @Path("/datafile/{id}/userPermissions") + public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + DataFile dataFile; + try { + dataFile = findDataFileOrDie(dataFileId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + User requestUser = getRequestUser(crc); + jsonObjectBuilder.add("canDownloadFile", permissionService.userOn(requestUser, dataFile).has(Permission.DownloadFile)); + jsonObjectBuilder.add("canManageFilePermissions", permissionService.userOn(requestUser, dataFile).has(Permission.ManageFilePermissions)); + jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); + return ok(jsonObjectBuilder); + } + // checkAuthorization is a convenience method; it calls the boolean method // isAccessAuthorized(), the actual 
workhorse, tand throws a 403 exception if not. @@ -1949,21 +1982,4 @@ private URI handleCustomZipDownload(User user, String customZipServiceUrl, Strin } return redirectUri; } - - @GET - @AuthRequired - @Path("/datafile/{id}/userPermissions") - public Response getUserPermissionsOnFile(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { - DataFile dataFile; - try { - dataFile = findDataFileOrDie(dataFileId); - } catch (WrappedResponse wr) { - return wr.getResponse(); - } - JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); - User requestUser = getRequestUser(crc); - jsonObjectBuilder.add("canDownloadFile", fileDownloadService.canDownloadFile(createDataverseRequest(requestUser), dataFile)); - jsonObjectBuilder.add("canEditOwnerDataset", permissionService.userOn(requestUser, dataFile.getOwner()).has(Permission.EditDataset)); - return ok(jsonObjectBuilder); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java index 296869762da..347a8946a46 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/ApiConstants.java @@ -12,4 +12,9 @@ private ApiConstants() { // Authentication public static final String CONTAINER_REQUEST_CONTEXT_USER = "user"; + + // Dataset + public static final String DS_VERSION_LATEST = ":latest"; + public static final String DS_VERSION_DRAFT = ":draft"; + public static final String DS_VERSION_LATEST_PUBLISHED = ":latest-published"; } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 704ec8f1989..292aba0cee3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -29,6 +29,7 @@ import edu.harvard.iq.dataverse.engine.command.impl.CreateDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.CreatePrivateUrlCommand; import edu.harvard.iq.dataverse.engine.command.impl.CuratePublishedDatasetVersionCommand; +import edu.harvard.iq.dataverse.engine.command.impl.DeaccessionDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetVersionCommand; import edu.harvard.iq.dataverse.engine.command.impl.DeleteDatasetLinkingDataverseCommand; @@ -93,10 +94,11 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonLDTerm; import edu.harvard.iq.dataverse.util.json.JsonParseException; -import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.util.json.JsonUtil; +import edu.harvard.iq.dataverse.util.SignpostingResources; import edu.harvard.iq.dataverse.search.IndexServiceBean; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.*; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; @@ -109,7 +111,6 @@ import java.io.IOException; import java.io.InputStream; -import java.io.StringReader; import java.net.URI; import java.sql.Timestamp; import java.text.MessageFormat; @@ -236,6 +237,9 @@ public class Datasets extends AbstractApiBean { @Inject PrivateUrlServiceBean privateUrlService; + @Inject + DatasetVersionFilesServiceBean datasetVersionFilesServiceBean; + /** * Used to consolidate 
the way we parse and handle dataset versions. * @param @@ -260,7 +264,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" MakeDataCountLoggingServiceBean.MakeDataCountEntry entry = new MakeDataCountEntry(uriInfo, headers, dvRequestService, retrieved); mdcLogService.logEntry(entry); } - return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest) : null)); + return ok(jsonbuilder.add("latestVersion", (latest != null) ? json(latest, true) : null)); }, getRequestUser(crc)); } @@ -272,7 +276,7 @@ public Response getDataset(@Context ContainerRequestContext crc, @PathParam("id" @GET @Path("/export") - @Produces({"application/xml", "application/json", "application/html" }) + @Produces({"application/xml", "application/json", "application/html", "application/ld+json" }) public Response exportDataset(@QueryParam("persistentId") String persistentId, @QueryParam("exporter") String exporter, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { try { @@ -387,8 +391,8 @@ public Response destroyDataset(@Context ContainerRequestContext crc, @PathParam( @AuthRequired @Path("{id}/versions/{versionId}") public Response deleteDraftVersion(@Context ContainerRequestContext crc, @PathParam("id") String id, @PathParam("versionId") String versionId ){ - if ( ! ":draft".equals(versionId) ) { - return badRequest("Only the :draft version can be deleted"); + if (!DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Only the " + DS_VERSION_DRAFT + " version can be deleted"); } return response( req -> { @@ -466,41 +470,121 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @GET @AuthRequired @Path("{id}/versions") - public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id ) { - return response( req -> - ok( execCommand( new ListVersionsCommand(req, findDatasetOrDie(id)) ) + public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("includeFiles") Boolean includeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { + + return response( req -> { + Dataset dataset = findDatasetOrDie(id); + + return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, (includeFiles == null ? true : includeFiles)) ) .stream() - .map( d -> json(d) ) - .collect(toJsonArray())), getRequestUser(crc)); + .map( d -> json(d, includeFiles == null ? true : includeFiles) ) + .collect(toJsonArray())); + }, getRequestUser(crc)); } @GET @AuthRequired @Path("{id}/versions/{versionId}") - public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + public Response getVersion(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("includeFiles") Boolean includeFiles, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { return response( req -> { - DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return (dsv == null || dsv.getId() == null) ? 
notFound("Dataset version not found") - : ok(json(dsv)); + DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); + + if (dsv == null || dsv.getId() == null) { + return notFound("Dataset version not found"); + } + + if (includeFiles == null ? true : includeFiles) { + dsv = datasetversionService.findDeep(dsv.getId()); + } + return ok(json(dsv, includeFiles == null ? true : includeFiles)); }, getRequestUser(crc)); } - + @GET @AuthRequired @Path("{id}/versions/{versionId}/files") - public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset, @QueryParam("orderCriteria") String orderCriteria, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return response( req -> { - DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - DatasetVersionServiceBean.FileMetadatasOrderCriteria fileMetadatasOrderCriteria; + public Response getVersionFiles(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("limit") Integer limit, + @QueryParam("offset") Integer offset, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @QueryParam("orderCriteria") String orderCriteria, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); + DatasetVersionFilesServiceBean.FileOrderCriteria fileOrderCriteria; try { - fileMetadatasOrderCriteria = orderCriteria != null ? DatasetVersionServiceBean.FileMetadatasOrderCriteria.valueOf(orderCriteria) : DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameAZ; + fileOrderCriteria = orderCriteria != null ? DatasetVersionFilesServiceBean.FileOrderCriteria.valueOf(orderCriteria) : DatasetVersionFilesServiceBean.FileOrderCriteria.NameAZ; } catch (IllegalArgumentException e) { - return error(Response.Status.BAD_REQUEST, "Invalid order criteria: " + orderCriteria); + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.order.criteria", List.of(orderCriteria))); } - return ok(jsonFileMetadatas(datasetversionService.getFileMetadatas(datasetVersion, limit, offset, fileMetadatasOrderCriteria))); + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? 
FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); + } + return ok(jsonFileMetadatas(datasetVersionFilesServiceBean.getFileMetadatas(datasetVersion, limit, offset, fileSearchCriteria, fileOrderCriteria))); }, getRequestUser(crc)); } - + + @GET + @AuthRequired + @Path("{id}/versions/{versionId}/files/counts") + public Response getVersionFileCounts(@Context ContainerRequestContext crc, + @PathParam("id") String datasetId, + @PathParam("versionId") String versionId, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { + return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); + } + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, includeDeaccessioned); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("total", datasetVersionFilesServiceBean.getFileMetadataCount(datasetVersion, fileSearchCriteria)); + jsonObjectBuilder.add("perContentType", json(datasetVersionFilesServiceBean.getFileMetadataCountPerContentType(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perCategoryName", json(datasetVersionFilesServiceBean.getFileMetadataCountPerCategoryName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perTabularTagName", jsonFileCountPerTabularTagNameMap(datasetVersionFilesServiceBean.getFileMetadataCountPerTabularTagName(datasetVersion, fileSearchCriteria))); + jsonObjectBuilder.add("perAccessStatus", jsonFileCountPerAccessStatusMap(datasetVersionFilesServiceBean.getFileMetadataCountPerAccessStatus(datasetVersion, fileSearchCriteria))); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); + } + @GET @AuthRequired @Path("{id}/dirindex") @@ -508,7 +592,7 @@ public Response getVersionFiles(@Context ContainerRequestContext crc, @PathParam public Response getFileAccessFolderView(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @QueryParam("version") String versionId, @QueryParam("folder") String folderName, @QueryParam("original") Boolean originals, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) { folderName = folderName == null ? "" : folderName; - versionId = versionId == null ? ":latest-published" : versionId; + versionId = versionId == null ? 
DS_VERSION_LATEST_PUBLISHED : versionId; DatasetVersion version; try { @@ -582,24 +666,26 @@ public Response getVersionMetadataBlock(@Context ContainerRequestContext crc, @GET @AuthRequired @Path("{id}/versions/{versionId}/linkset") - public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - if ( ":draft".equals(versionId) ) { - return badRequest("Signposting is not supported on the :draft version"); + public Response getLinkset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, + @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (DS_VERSION_DRAFT.equals(versionId)) { + return badRequest("Signposting is not supported on the " + DS_VERSION_DRAFT + " version"); } - User user = getRequestUser(crc); - return response(req -> { + DataverseRequest req = createDataverseRequest(getRequestUser(crc)); + try { DatasetVersion dsv = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers); - return ok(Json.createObjectBuilder().add( - "linkset", - new SignpostingResources( - systemConfig, - dsv, - JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), - JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("") - ).getJsonLinkset() - ) - ); - }, user); + return Response + .ok(Json.createObjectBuilder() + .add("linkset", + new SignpostingResources(systemConfig, dsv, + JvmSettings.SIGNPOSTING_LEVEL1_AUTHOR_LIMIT.lookupOptional().orElse(""), + JvmSettings.SIGNPOSTING_LEVEL1_ITEM_LIMIT.lookupOptional().orElse("")) + .getJsonLinkset()) + .build()) + .type(MediaType.APPLICATION_JSON).build(); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } } @GET @@ -669,16 +755,15 @@ public Response updateDatasetPIDMetadataAll(@Context ContainerRequestContext crc @AuthRequired @Path("{id}/versions/{versionId}") @Consumes(MediaType.APPLICATION_JSON) - public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId){ - - if ( ! ":draft".equals(versionId) ) { - return error( Response.Status.BAD_REQUEST, "Only the :draft version can be updated"); + public Response updateDraftVersion(@Context ContainerRequestContext crc, String jsonBody, @PathParam("id") String id, @PathParam("versionId") String versionId) { + if (!DS_VERSION_DRAFT.equals(versionId)) { + return error( Response.Status.BAD_REQUEST, "Only the " + DS_VERSION_DRAFT + " version can be updated"); } - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); DatasetVersion incomingVersion = jsonParser().parseDatasetVersion(json); // clear possibly stale fields from the incoming dataset version. 
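+ // (An assumption, since the cleared fields are not shown in this hunk:
+ // likely database ids and version-state flags carried over from an
+ // exported JSON payload.)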
@@ -716,7 +801,7 @@ public Response updateDraftVersion(@Context ContainerRequestContext crc, String } managedVersion = execCommand(new CreateDatasetVersionCommand(req, ds, incomingVersion)); } - return ok( json(managedVersion) ); + return ok( json(managedVersion, true) ); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset version Json: " + ex.getMessage(), ex); @@ -755,7 +840,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @ @Path("{id}/metadata") @Produces("application/ld+json, application/json-ld") public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) { - return getVersionJsonLDMetadata(crc, id, ":draft", uriInfo, headers); + return getVersionJsonLDMetadata(crc, id, DS_VERSION_DRAFT, uriInfo, headers); } @PUT @@ -834,10 +919,10 @@ public Response deleteVersionMetadata(@Context ContainerRequestContext crc, Stri } private Response processDatasetFieldDataDelete(String jsonBody, String id, DataverseRequest req) { - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -951,7 +1036,7 @@ private Response processDatasetFieldDataDelete(String jsonBody, String id, Datav DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); - return ok(json(managedVersion)); + return ok(json(managedVersion, true)); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex); @@ -991,10 +1076,10 @@ public Response editVersionMetadata(@Context ContainerRequestContext crc, String private Response processDatasetUpdate(String jsonBody, String id, DataverseRequest req, Boolean replaceData){ - try (StringReader rdr = new StringReader(jsonBody)) { + try { Dataset ds = findDatasetOrDie(id); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); //Get the current draft or create a new version to update DatasetVersion dsv = ds.getOrCreateEditVersion(); dsv.getTermsOfUseAndAccess().setDatasetVersion(dsv); @@ -1100,7 +1185,7 @@ private Response processDatasetUpdate(String jsonBody, String id, DataverseReque } DatasetVersion managedVersion = execCommand(new UpdateDatasetVersionCommand(ds, req)).getLatestVersion(); - return ok(json(managedVersion)); + return ok(json(managedVersion, true)); } catch (JsonParseException ex) { logger.log(Level.SEVERE, "Semantic error parsing dataset update Json: " + ex.getMessage(), ex); @@ -1441,8 +1526,7 @@ public Response createFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); Embargo embargo = new Embargo(); @@ -1585,8 +1669,7 @@ public Response removeFileEmbargo(@Context ContainerRequestContext crc, @PathPar return error(Status.BAD_REQUEST, "No Embargoes allowed"); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = 
JsonUtil.getJsonObject(jsonBody); List<DataFile> datasetFiles = dataset.getFiles(); List<DataFile> embargoFilesToUnset = new LinkedList<>(); @@ -1689,7 +1772,7 @@ public Response getCustomTermsTab(@PathParam("id") String id, @PathParam("versio return error(Status.NOT_FOUND, "This Dataset has no custom license"); } persistentId = getRequestParameter(":persistentId".substring(1)); - if (versionId.equals(":draft")) { + if (versionId.equals(DS_VERSION_DRAFT)) { versionId = "DRAFT"; } } catch (WrappedResponse wrappedResponse) { @@ -2101,8 +2184,7 @@ public Response returnToAuthor(@Context ContainerRequestContext crc, @PathParam( if (jsonBody == null || jsonBody.isEmpty()) { return error(Response.Status.BAD_REQUEST, "You must supply JSON to this API endpoint and it must contain a reason for returning the dataset (field: reasonForReturn)."); } - StringReader rdr = new StringReader(jsonBody); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); try { Dataset dataset = findDatasetOrDie(idSupplied); String reasonForReturn = null; @@ -2354,9 +2436,7 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa List<PartETag> eTagList = new ArrayList<>(); logger.info("Etags: " + partETagBody); try { - JsonReader jsonReader = Json.createReader(new StringReader(partETagBody)); - JsonObject object = jsonReader.readObject(); - jsonReader.close(); + JsonObject object = JsonUtil.getJsonObject(partETagBody); for (String partNo : object.keySet()) { eTagList.add(new PartETag(Integer.parseInt(partNo), object.getString(partNo))); } @@ -2650,11 +2730,11 @@ private void msgt(String m) { public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl) throws WrappedResponse { switch (versionId) { - case ":latest": + case DS_VERSION_LATEST: return hdl.handleLatest(); - case ":draft": + case DS_VERSION_DRAFT: return hdl.handleDraft(); - case ":latest-published": + case DS_VERSION_LATEST_PUBLISHED: return hdl.handleLatestPublished(); default: try { @@ -2674,11 +2754,15 @@ public static <T> T handleVersion(String versionId, DsVersionHandler<T> hdl) } private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers) throws WrappedResponse { + return getDatasetVersionOrDie(req, versionNumber, ds, uriInfo, headers, false); + } + + private DatasetVersion getDatasetVersionOrDie(final DataverseRequest req, String versionNumber, final Dataset ds, UriInfo uriInfo, HttpHeaders headers, boolean includeDeaccessioned) throws WrappedResponse { DatasetVersion dsv = execCommand(handleVersion(versionNumber, new DsVersionHandler<Command<DatasetVersion>>() { @Override public Command<DatasetVersion> handleLatest() { - return new GetLatestAccessibleDatasetVersionCommand(req, ds); + return new GetLatestAccessibleDatasetVersionCommand(req, ds, includeDeaccessioned); } @Override @@ -2688,12 +2772,12 @@ public Command<DatasetVersion> handleDraft() { @Override public Command<DatasetVersion> handleSpecific(long major, long minor) { - return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor); + return new GetSpecificPublishedDatasetVersionCommand(req, ds, major, minor, includeDeaccessioned); } @Override public Command<DatasetVersion> handleLatestPublished() { - return new GetLatestPublishedDatasetVersionCommand(req, ds); + return new GetLatestPublishedDatasetVersionCommand(req, ds, includeDeaccessioned); } })); if (dsv == null || dsv.getId() == null) { @@ -2906,25 +2990,58 @@ public Response getMakeDataCountMetricCurrentMonth(@PathParam("id") String idSup String nullCurrentMonth =
null; return getMakeDataCountMetric(idSupplied, metricSupplied, nullCurrentMonth, country); } - + @GET @AuthRequired @Path("{identifier}/storagesize") - public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + public Response getStorageSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("includeCached") boolean includeCached) { return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.storage"), execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), includeCached, GetDatasetStorageSizeCommand.Mode.STORAGE, null)))), getRequestUser(crc)); } - + @GET @AuthRequired @Path("{identifier}/versions/{versionId}/downloadsize") - public Response getDownloadSize(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @PathParam("versionId") String version, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + public Response getDownloadSize(@Context ContainerRequestContext crc, + @PathParam("identifier") String dvIdtf, + @PathParam("versionId") String version, + @QueryParam("contentType") String contentType, + @QueryParam("accessStatus") String accessStatus, + @QueryParam("categoryName") String categoryName, + @QueryParam("tabularTagName") String tabularTagName, + @QueryParam("searchText") String searchText, + @QueryParam("mode") String mode, + @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, + @Context UriInfo uriInfo, + @Context HttpHeaders headers) { - return response(req -> ok(MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), - execCommand(new GetDatasetStorageSizeCommand(req, findDatasetOrDie(dvIdtf), false, GetDatasetStorageSizeCommand.Mode.DOWNLOAD, getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers))))), getRequestUser(crc)); + return response(req -> { + FileSearchCriteria fileSearchCriteria; + try { + fileSearchCriteria = new FileSearchCriteria( + contentType, + accessStatus != null ? FileSearchCriteria.FileAccessStatus.valueOf(accessStatus) : null, + categoryName, + tabularTagName, + searchText + ); + } catch (IllegalArgumentException e) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(accessStatus))); + } + DatasetVersionFilesServiceBean.FileDownloadSizeMode fileDownloadSizeMode; + try { + fileDownloadSizeMode = mode != null ? 
DatasetVersionFilesServiceBean.FileDownloadSizeMode.valueOf(mode) : DatasetVersionFilesServiceBean.FileDownloadSizeMode.All; + } catch (IllegalArgumentException e) { + return error(Response.Status.BAD_REQUEST, "Invalid mode: " + mode); + } + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, version, findDatasetOrDie(dvIdtf), uriInfo, headers, includeDeaccessioned); + long datasetStorageSize = datasetVersionFilesServiceBean.getFilesDownloadSize(datasetVersion, fileSearchCriteria, fileDownloadSizeMode); + String message = MessageFormat.format(BundleUtil.getStringFromBundle("datasets.api.datasize.download"), datasetStorageSize); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("message", message); + jsonObjectBuilder.add("storageSize", datasetStorageSize); + return ok(jsonObjectBuilder); + }, getRequestUser(crc)); } @GET @@ -3856,9 +3973,9 @@ public Response getPrivateUrlDatasetVersion(@PathParam("privateUrlToken") String JsonObjectBuilder responseJson; if (isAnonymizedAccess) { List<String> anonymizedFieldTypeNamesList = new ArrayList<>(Arrays.asList(anonymizedFieldTypeNames.split(",\\s"))); - responseJson = json(dsv, anonymizedFieldTypeNamesList); + responseJson = json(dsv, anonymizedFieldTypeNamesList, true); } else { - responseJson = json(dsv); + responseJson = json(dsv, true); } return ok(responseJson); } @@ -3882,15 +3999,43 @@ public Response getDatasetVersionCitation(@Context ContainerRequestContext crc, return response(req -> ok( getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers).getCitation(true, false)), getRequestUser(crc)); } - + + @POST + @AuthRequired + @Path("{id}/versions/{versionId}/deaccession") + public Response deaccessionDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, String jsonBody, @Context UriInfo uriInfo, @Context HttpHeaders headers) { + if (DS_VERSION_DRAFT.equals(versionId) || DS_VERSION_LATEST.equals(versionId)) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED))); + } + return response(req -> { + DatasetVersion datasetVersion = getDatasetVersionOrDie(req, versionId, findDatasetOrDie(datasetId), uriInfo, headers, false); + try { + JsonObject jsonObject = JsonUtil.getJsonObject(jsonBody); + datasetVersion.setVersionNote(jsonObject.getString("deaccessionReason")); + String deaccessionForwardURL = jsonObject.getString("deaccessionForwardURL", null); + if (deaccessionForwardURL != null) { + try { + datasetVersion.setArchiveNote(deaccessionForwardURL); + } catch (IllegalArgumentException iae) { + return badRequest(BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.forward.url", List.of(iae.getMessage()))); + } + } + execCommand(new DeaccessionDatasetVersionCommand(req, datasetVersion, false)); + return ok("Dataset " + datasetId + " deaccessioned for version " + versionId); + } catch (JsonParsingException jpe) { + return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + @GET @AuthRequired @Path("{identifier}/guestbookEntryAtRequest") public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws
WrappedResponse { + + Dataset dataset; + try { dataset = findDatasetOrDie(dvIdtf); } catch (WrappedResponse ex) { @@ -3902,14 +4047,14 @@ public Response getGuestbookEntryOption(@Context ContainerRequestContext crc, @P } return ok(dataset.getEffectiveGuestbookEntryAtRequest()); } - + @PUT @AuthRequired @Path("{identifier}/guestbookEntryAtRequest") public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, - boolean gbAtRequest, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + boolean gbAtRequest, + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // Superuser-only: AuthenticatedUser user; try { @@ -3937,13 +4082,13 @@ public Response setguestbookEntryAtRequest(@Context ContainerRequestContext crc, datasetService.merge(dataset); return ok("Guestbook Entry At Request set to: " + choice); } - + @DELETE @AuthRequired @Path("{identifier}/guestbookEntryAtRequest") public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, - @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { - + @Context UriInfo uriInfo, @Context HttpHeaders headers) throws WrappedResponse { + // Superuser-only: AuthenticatedUser user; try { @@ -3962,10 +4107,29 @@ public Response resetGuestbookEntryAtRequest(@Context ContainerRequestContext cr } catch (WrappedResponse ex) { return error(Response.Status.NOT_FOUND, "No such dataset"); } - + dataset.setGuestbookEntryAtRequest(DvObjectContainer.UNDEFINED_CODE); datasetService.merge(dataset); return ok("Guestbook Entry At Request reset to default: " + dataset.getEffectiveGuestbookEntryAtRequest()); } + @GET + @AuthRequired + @Path("{id}/userPermissions") + public Response getUserPermissionsOnDataset(@Context ContainerRequestContext crc, @PathParam("id") String datasetId) { + Dataset dataset; + try { + dataset = findDatasetOrDie(datasetId); + } catch (WrappedResponse wr) { + return wr.getResponse(); + } + User requestUser = getRequestUser(crc); + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("canViewUnpublishedDataset", permissionService.userOn(requestUser, dataset).has(Permission.ViewUnpublishedDataset)); + jsonObjectBuilder.add("canEditDataset", permissionService.userOn(requestUser, dataset).has(Permission.EditDataset)); + jsonObjectBuilder.add("canPublishDataset", permissionService.userOn(requestUser, dataset).has(Permission.PublishDataset)); + jsonObjectBuilder.add("canManageDatasetPermissions", permissionService.userOn(requestUser, dataset).has(Permission.ManageDatasetPermissions)); + jsonObjectBuilder.add("canDeleteDatasetDraft", permissionService.userOn(requestUser, dataset).has(Permission.DeleteDatasetDraft)); + return ok(jsonObjectBuilder); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index a60775cbd38..d0711aefa5f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -78,8 +78,9 @@ import edu.harvard.iq.dataverse.util.json.JSONLDUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import static edu.harvard.iq.dataverse.util.json.JsonPrinter.brief; -import java.io.StringReader; import java.util.Collections; 
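A hedged usage sketch for the new {id}/userPermissions endpoint defined above. The serverUrl, apiToken, and dataset id values are placeholders, not values from this changeset; X-Dataverse-key is the standard Dataverse API token header:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class UserPermissionsCallSketch {
    public static void main(String[] args) throws Exception {
        String serverUrl = "https://dataverse.example.edu";              // placeholder
        String apiToken = "00000000-0000-0000-0000-000000000000";        // placeholder
        long datasetId = 42L;                                            // placeholder

        // GET /api/datasets/{id}/userPermissions returns the five booleans built
        // in getUserPermissionsOnDataset above: canViewUnpublishedDataset,
        // canEditDataset, canPublishDataset, canManageDatasetPermissions,
        // and canDeleteDatasetDraft.
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(serverUrl + "/api/datasets/" + datasetId + "/userPermissions"))
                .header("X-Dataverse-key", apiToken)
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}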
import java.util.LinkedList; import java.util.List; @@ -178,8 +179,8 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, Dataverse d; JsonObject dvJson; - try (StringReader rdr = new StringReader(body)) { - dvJson = Json.createReader(rdr).readObject(); + try { + dvJson = JsonUtil.getJsonObject(body); d = jsonParser().parseDataverse(dvJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); @@ -559,8 +560,8 @@ public Response recreateDataset(@Context ContainerRequestContext crc, String jso } private Dataset parseDataset(String datasetJson) throws WrappedResponse { - try (StringReader rdr = new StringReader(datasetJson)) { - return jsonParser().parseDataset(Json.createReader(rdr).readObject()); + try { + return jsonParser().parseDataset(JsonUtil.getJsonObject(datasetJson)); } catch (JsonParsingException | JsonParseException jpe) { logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", datasetJson); throw new WrappedResponse(error(Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage())); @@ -976,6 +977,8 @@ public Response listAssignments(@Context ContainerRequestContext crc, @PathParam */ // File tempDir; // +// TODO: Code duplicate in ThemeWidgetFragment. Maybe extract, make static and put some place else? +// Important: at least use JvmSettings.DOCROOT_DIRECTORY and not the hardcoded location! // private void createTempDir(Dataverse editDv) { // try { // File tempRoot = java.nio.file.Files.createDirectories(Paths.get("../docroot/logos/temp")).toFile(); @@ -1171,8 +1174,9 @@ public Response getGroupByOwnerAndAliasInOwner(@Context ContainerRequestContext public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContext crc, @PathParam("identifier") String dvIdtf, @QueryParam("guestbookId") Long gbId, @Context HttpServletResponse response) { + Dataverse dv; try { - Dataverse dv = findDataverseOrDie(dvIdtf); + dv = findDataverseOrDie(dvIdtf); User u = getRequestUser(crc); DataverseRequest req = createDataverseRequest(u); if (permissionSvc.request(req) @@ -1192,16 +1196,14 @@ public Response getGuestbookResponsesByDataverse(@Context ContainerRequestContex public void write(OutputStream os) throws IOException, WebApplicationException { - Dataverse dv = dataverseService.findByAlias(dvIdtf); Map<Integer, Object> customQandAs = guestbookResponseService.mapCustomQuestionAnswersAsStrings(dv.getId(), gbId); Map<Integer, String> datasetTitles = guestbookResponseService.mapDatasetTitles(dv.getId()); - + List<Object[]> guestbookResults = guestbookResponseService.getGuestbookResults(dv.getId(), gbId); os.write("Guestbook, Dataset, Dataset PID, Date, Type, File Name, File Id, File PID, User Name, Email, Institution, Position, Custom Questions\n".getBytes()); for (Object[] result : guestbookResults) { StringBuilder sb = guestbookResponseService.convertGuestbookResponsesToCSV(customQandAs, datasetTitles, result); os.write(sb.toString().getBytes()); - } } }; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java index 82811162d52..ad24d81d996 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java @@ -4,6 +4,7 @@ import com.google.gson.JsonObject; import edu.harvard.iq.dataverse.DataFile; import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DataFileTag; import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetLock; import edu.harvard.iq.dataverse.DatasetServiceBean;
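A hedged usage sketch for the getVersionFileCounts and extended getDownloadSize endpoints earlier in this diff. The serverUrl, apiToken, and ids are placeholders; the query values shown are illustrative, not an exhaustive list of what the filters accept:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class VersionFilesQuerySketch {
    public static void main(String[] args) throws Exception {
        String serverUrl = "https://dataverse.example.edu";              // placeholder
        String apiToken = "00000000-0000-0000-0000-000000000000";        // placeholder
        HttpClient client = HttpClient.newHttpClient();

        // Per-criteria counts: the response built above contains "total",
        // "perContentType", "perCategoryName", "perTabularTagName", and "perAccessStatus".
        String countsUrl = serverUrl
                + "/api/datasets/42/versions/:latest/files/counts?contentType=image/png";

        // Filtered download size: "All" is the fallback mode the endpoint applies
        // when no mode query parameter is supplied.
        String sizeUrl = serverUrl
                + "/api/datasets/42/versions/:latest/downloadsize?mode=All&searchText=survey";

        for (String url : new String[] { countsUrl, sizeUrl }) {
            HttpRequest request = HttpRequest.newBuilder()
                    .uri(URI.create(url))
                    .header("X-Dataverse-key", apiToken)
                    .GET()
                    .build();
            System.out.println(client.send(request, HttpResponse.BodyHandlers.ofString()).body());
        }
    }
}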
@@ -54,6 +55,7 @@ import java.io.IOException; import java.io.InputStream; +import java.io.StringReader; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -63,15 +65,12 @@ import jakarta.ejb.EJBException; import jakarta.inject.Inject; import jakarta.json.Json; +import jakarta.json.JsonArray; +import jakarta.json.JsonString; +import jakarta.json.JsonValue; +import jakarta.json.stream.JsonParsingException; import jakarta.servlet.http.HttpServletResponse; -import jakarta.ws.rs.Consumes; -import jakarta.ws.rs.DELETE; -import jakarta.ws.rs.GET; -import jakarta.ws.rs.POST; -import jakarta.ws.rs.PUT; -import jakarta.ws.rs.Path; -import jakarta.ws.rs.PathParam; -import jakarta.ws.rs.QueryParam; +import jakarta.ws.rs.*; import jakarta.ws.rs.container.ContainerRequestContext; import jakarta.ws.rs.core.Context; import jakarta.ws.rs.core.HttpHeaders; @@ -112,6 +111,8 @@ public class Files extends AbstractApiBean { MakeDataCountLoggingServiceBean mdcLogService; @Inject GuestbookResponseServiceBean guestbookResponseService; + @Inject + DataFileServiceBean dataFileServiceBean; private static final Logger logger = Logger.getLogger(Files.class.getName()); @@ -852,18 +853,82 @@ public Response getFileDataTables(@Context ContainerRequestContext crc, @PathPar try { dataFile = findDataFileOrDie(dataFileId); } catch (WrappedResponse e) { - return error(Response.Status.NOT_FOUND, "File not found for given id."); + return notFound("File not found for given id."); } if (dataFile.isRestricted() || FileUtil.isActivelyEmbargoed(dataFile)) { DataverseRequest dataverseRequest = createDataverseRequest(getRequestUser(crc)); boolean hasPermissionToDownloadFile = permissionSvc.requestOn(dataverseRequest, dataFile).has(Permission.DownloadFile); if (!hasPermissionToDownloadFile) { - return error(FORBIDDEN, "Insufficient permissions to access the requested information."); + return forbidden("Insufficient permissions to access the requested information."); } } if (!dataFile.isTabularData()) { - return error(BAD_REQUEST, "This operation is only available for tabular files."); + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); } return ok(jsonDT(dataFile.getDataTables())); } + + @POST + @AuthRequired + @Path("{id}/metadata/categories") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileCategories(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedCategoriesJson = jsonObject.getJsonArray("categories"); + FileMetadata fileMetadata = dataFile.getFileMetadata(); + for (JsonValue jsonValue : requestedCategoriesJson) { + JsonString jsonString = (JsonString) jsonValue; + fileMetadata.addCategoryByName(jsonString.getString()); + } + execCommand(new UpdateDatasetVersionCommand(fileMetadata.getDataFile().getOwner(), req)); + return ok("Categories of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @POST + @AuthRequired + @Path("{id}/metadata/tabularTags") + @Produces(MediaType.APPLICATION_JSON) + public Response setFileTabularTags(@Context ContainerRequestContext crc, @PathParam("id") String 
dataFileId, String jsonBody) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + if (!dataFile.isTabularData()) { + return badRequest(BundleUtil.getStringFromBundle("files.api.only.tabular.supported")); + } + jakarta.json.JsonObject jsonObject; + try (StringReader stringReader = new StringReader(jsonBody)) { + jsonObject = Json.createReader(stringReader).readObject(); + JsonArray requestedTabularTagsJson = jsonObject.getJsonArray("tabularTags"); + for (JsonValue jsonValue : requestedTabularTagsJson) { + JsonString jsonString = (JsonString) jsonValue; + try { + dataFile.addUniqueTagByLabel(jsonString.getString()); + } catch (IllegalArgumentException iax){ + return badRequest(iax.getMessage()); + } + } + execCommand(new UpdateDatasetVersionCommand(dataFile.getOwner(), req)); + return ok("Tabular tags of file " + dataFileId + " updated."); + } catch (JsonParsingException jpe) { + return badRequest("Error parsing Json: " + jpe.getMessage()); + } + }, getRequestUser(crc)); + } + + @GET + @AuthRequired + @Path("{id}/hasBeenDeleted") + public Response getHasBeenDeleted(@Context ContainerRequestContext crc, @PathParam("id") String dataFileId) { + return response(req -> { + DataFile dataFile = execCommand(new GetDataFileCommand(req, findDataFileOrDie(dataFileId))); + return ok(dataFileServiceBean.hasBeenDeleted(dataFile)); + }, getRequestUser(crc)); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java index d7eec9f5757..dfc9f48dd1a 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/HarvestingClients.java @@ -16,6 +16,7 @@ import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonPrinter; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import jakarta.json.JsonObjectBuilder; import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import java.io.IOException; @@ -164,8 +165,8 @@ public Response createHarvestingClient(@Context ContainerRequestContext crc, Str return wr.getResponse(); } - try ( StringReader rdr = new StringReader(jsonBody) ) { - JsonObject json = Json.createReader(rdr).readObject(); + try { + JsonObject json = JsonUtil.getJsonObject(jsonBody); // Check that the client with this name doesn't exist yet: // (we could simply let the command fail, but that does not result @@ -261,9 +262,9 @@ public Response modifyHarvestingClient(@Context ContainerRequestContext crc, Str String ownerDataverseAlias = harvestingClient.getDataverse().getAlias(); - try ( StringReader rdr = new StringReader(jsonBody) ) { + try { DataverseRequest req = createDataverseRequest(getRequestUser(crc)); - JsonObject json = Json.createReader(rdr).readObject(); + JsonObject json = JsonUtil.getJsonObject(jsonBody); HarvestingClient newHarvestingClient = new HarvestingClient(); String newDataverseAlias = jsonParser().parseHarvestingClient(json, newHarvestingClient); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java index 37b4792920f..7f81ca20988 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Prov.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Prov.java @@ -12,7 +12,8 @@ import edu.harvard.iq.dataverse.engine.command.impl.PersistProvJsonCommand; import 
edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand; import edu.harvard.iq.dataverse.util.BundleUtil; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.util.HashMap; import java.util.logging.Logger; import jakarta.inject.Inject; @@ -109,11 +110,10 @@ public Response addProvFreeForm(@Context ContainerRequestContext crc, String bod if(!systemConfig.isProvCollectionEnabled()) { return error(FORBIDDEN, BundleUtil.getStringFromBundle("api.prov.error.provDisabled")); } - StringReader rdr = new StringReader(body); JsonObject jsonObj = null; try { - jsonObj = Json.createReader(rdr).readObject(); + jsonObj = JsonUtil.getJsonObject(body); } catch (JsonException ex) { return error(BAD_REQUEST, BundleUtil.getStringFromBundle("api.prov.error.freeformInvalidJson")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java index bcb67b180c8..c17ba909230 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/imports/ImportServiceBean.java @@ -36,12 +36,12 @@ import edu.harvard.iq.dataverse.util.ConstraintViolationUtil; import edu.harvard.iq.dataverse.util.json.JsonParseException; import edu.harvard.iq.dataverse.util.json.JsonParser; +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.license.LicenseServiceBean; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintWriter; -import java.io.StringReader; import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; @@ -60,7 +60,6 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import jakarta.json.JsonReader; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.validation.ConstraintViolation; @@ -259,9 +258,8 @@ public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, Harve throw new ImportException("Failed to transform XML metadata format "+metadataFormat+" into a DatasetDTO"); } } - - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + + JsonObject obj = JsonUtil.getJsonObject(json); //and call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService, harvestingClient); @@ -396,10 +394,8 @@ public JsonObject ddiToJson(String xmlToParse) throws ImportException, XMLStream // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); - return obj; + return JsonUtil.getJsonObject(json); } public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException { @@ -416,8 +412,7 @@ public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse o // convert DTO to Json, Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(dsDTO); - JsonReader jsonReader = Json.createReader(new StringReader(json)); - JsonObject obj = jsonReader.readObject(); + JsonObject obj = JsonUtil.getJsonObject(json); //and 
call parse Json to read it into a dataset try { JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService, licenseService); diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java index 822ada0b83e..e3c6bfede7c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/S3AccessIO.java @@ -191,7 +191,7 @@ public void open(DataAccessOption... options) throws IOException { } } // else we're OK (assumes bucket name in storageidentifier matches the driver's bucketname) } else { - if(!storageIdentifier.substring((this.driverId + DataAccess.SEPARATOR).length()).contains(":")) { + if(!storageIdentifier.contains(":")) { //No driver id or bucket newStorageIdentifier= this.driverId + DataAccess.SEPARATOR + bucketName + ":" + storageIdentifier; } else { @@ -1385,4 +1385,4 @@ public List<String> cleanUp(Predicate<String> filter, boolean dryRun) throws IOE } return toDelete; } -} \ No newline at end of file +} diff --git a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java index 00db98e894e..d33f8f5e5bd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataaccess/StorageIO.java @@ -606,7 +606,7 @@ public static String getDriverPrefix(String driverId) { } public static boolean isDirectUploadEnabled(String driverId) { - return (DataAccess.S3.equals(driverId) && Boolean.parseBoolean(System.getProperty("dataverse.files." + DataAccess.S3 + ".upload-redirect"))) || + return (System.getProperty("dataverse.files." + driverId + ".type").equals(DataAccess.S3) && Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-redirect"))) || Boolean.parseBoolean(System.getProperty("dataverse.files." + driverId + ".upload-out-of-band")); } diff --git a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java index adbd132bce8..096f1f87acc 100644 --- a/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/dataset/DatasetUtil.java @@ -9,6 +9,8 @@ import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.dataaccess.DataAccess; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static edu.harvard.iq.dataverse.dataaccess.DataAccess.getStorageIO; import edu.harvard.iq.dataverse.dataaccess.StorageIO; import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter; @@ -521,7 +523,7 @@ public static boolean validateDatasetMetadataExternally(Dataset ds, String execu // for the filter to whitelist by these attributes. try { - jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion())) + jsonMetadata = json(ds).add("datasetVersion", json(ds.getLatestVersion(), true)) .add("sourceAddress", sourceAddressLabel) .add("userIdentifier", userIdentifier) .add("parentAlias", ds.getOwner().getAlias()) @@ -580,10 +582,10 @@ public static String getLicenseURI(DatasetVersion dsv) { // Return the URI // For standard licenses, just return the stored URI return (license != null) ?
license.getUri().toString() - // For custom terms, construct a URI with :draft or the version number in the URI + // For custom terms, construct a URI with the draft version constant or the version number in the URI : (dsv.getVersionState().name().equals("DRAFT") ? dsv.getDataverseSiteUrl() - + "/api/datasets/:persistentId/versions/:draft/customlicense?persistentId=" + + "/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "/customlicense?persistentId=" + dsv.getDataset().getGlobalId().asString() : dsv.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" + dsv.getVersionNumber() + "." + dsv.getMinorVersionNumber() + "/customlicense?persistentId=" diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java index f5e70209744..3da087addd9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/FinalizeDatasetPublicationCommand.java @@ -10,6 +10,7 @@ import edu.harvard.iq.dataverse.DatasetVersionUser; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.Embargo; import edu.harvard.iq.dataverse.UserNotification; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; @@ -117,9 +118,37 @@ public Dataset execute(CommandContext ctxt) throws CommandException { // is this the first publication of the dataset? if (theDataset.getPublicationDate() == null) { theDataset.setReleaseUser((AuthenticatedUser) getUser()); - } - if ( theDataset.getPublicationDate() == null ) { + theDataset.setPublicationDate(new Timestamp(new Date().getTime())); + + // if there are any embargoed files in this version, we will save + // the latest availability date as the "embargoCitationDate" for future + // reference (if the files are not available yet, as of publishing of + // the dataset, this date will be used as the "citation date" of the dataset, + // instead of the publicationDate, in compliance with the DataCite + // best practices). + // the code below replicates the logic that used to be in the method + // Dataset.getCitationDate() that calculated this adjusted date in real time. + + Timestamp latestEmbargoDate = null; + for (DataFile dataFile : theDataset.getFiles()) { + // this is the first version of the dataset that is being published. + // therefore we can iterate through .getFiles() instead of obtaining + // the DataFiles by going through the FileMetadatas in the current version. + Embargo embargo = dataFile.getEmbargo(); + if (embargo != null) { + // "dataAvailable" is not nullable in the Embargo class, no need for a null check + Timestamp embargoDate = Timestamp.valueOf(embargo.getDateAvailable().atStartOfDay()); + if (latestEmbargoDate == null || latestEmbargoDate.compareTo(embargoDate) < 0) { + latestEmbargoDate = embargoDate; + } + } + } + // the above loop could be easily replaced with a database query; + // but we iterate through .getFiles() elsewhere in the command, when + // updating and/or registering the files, so it should not result in + // an extra performance hit.
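A standalone illustration of the "latest embargo date wins" rule implemented in the loop above, assuming two files embargoed until 2024-01-01 and 2024-06-01; plain LocalDates stand in for the real Embargo entities:

import java.sql.Timestamp;
import java.time.LocalDate;
import java.util.List;

public class EmbargoCitationDateSketch {
    public static void main(String[] args) {
        // Stand-ins for embargo.getDateAvailable() of two embargoed files.
        List<LocalDate> dateAvailableList = List.of(
                LocalDate.parse("2024-01-01"),
                LocalDate.parse("2024-06-01"));

        Timestamp latestEmbargoDate = null;
        for (LocalDate dateAvailable : dateAvailableList) {
            Timestamp embargoDate = Timestamp.valueOf(dateAvailable.atStartOfDay());
            if (latestEmbargoDate == null || latestEmbargoDate.compareTo(embargoDate) < 0) {
                latestEmbargoDate = embargoDate;
            }
        }
        // Prints 2024-06-01 00:00:00.0: the later embargo becomes the citation date.
        System.out.println(latestEmbargoDate);
    }
}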
+ theDataset.setEmbargoCitationDate(latestEmbargoDate); } //Clear any external status diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java index f1f27fdcee2..09b33c4efc4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetDatasetStorageSizeCommand.java @@ -7,7 +7,6 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -15,6 +14,7 @@ import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.IOException; import java.util.Collections; import java.util.Map; @@ -38,7 +38,7 @@ public class GetDatasetStorageSizeCommand extends AbstractCommand<Long> { public enum Mode { STORAGE, DOWNLOAD - }; + } public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target) { super(aRequest, target); @@ -58,21 +58,20 @@ public GetDatasetStorageSizeCommand(DataverseRequest aRequest, Dataset target, b @Override public Long execute(CommandContext ctxt) throws CommandException { - logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); - if (dataset == null) { // should never happen - must indicate some data corruption in the database throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.listing.error"), this); } + logger.fine("getDataverseStorageSize called on " + dataset.getDisplayName()); + try { return ctxt.datasets().findStorageSize(dataset, countCachedFiles, mode, version); } catch (IOException ex) { throw new CommandException(BundleUtil.getStringFromBundle("datasets.api.datasize.ioerror"), this); } - } - + @Override public Map<String, Set<Permission>> getRequiredPermissions() { // for data file check permission on owning dataset diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java index 680a5c3aaef..1454a4b1fdd 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestAccessibleDatasetVersionCommand.java @@ -17,29 +17,30 @@ /** * Get the latest version of a dataset a user can view.
+ * + * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand<DatasetVersion>{ +public class GetLatestAccessibleDatasetVersionCommand extends AbstractCommand<DatasetVersion> { private final Dataset ds; + private final boolean includeDeaccessioned; public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestAccessibleDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - if (ds.getLatestVersion().isDraft() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.ViewUnpublishedDataset)) { return ctxt.engine().submit(new GetDraftDatasetVersionCommand(getRequest(), ds)); } - - return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds)); - + return ctxt.engine().submit(new GetLatestPublishedDatasetVersionCommand(getRequest(), ds, includeDeaccessioned)); } - - - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java index 18adff2e55c..4e4252fd155 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetLatestPublishedDatasetVersionCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -9,26 +10,31 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion>{ +public class GetLatestPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion> { private final Dataset ds; - + private final boolean includeDeaccessioned; + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset) { + this(aRequest, anAffectedDataset, false); + } + + public GetLatestPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { return dsv; - } } - return null; } - } \ No newline at end of file + return null; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java index 3efb38e4a91..a87eb8a99a5 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/GetSpecificPublishedDatasetVersionCommand.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -15,27 +16,32 @@ import edu.harvard.iq.dataverse.engine.command.exception.CommandException; /** - * * @author Naomi */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion>{ +public class GetSpecificPublishedDatasetVersionCommand extends AbstractCommand<DatasetVersion> { private final Dataset ds; private final long majorVersion; private final long minorVersion; - + private boolean includeDeaccessioned; + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum) { + this(aRequest, anAffectedDataset, majorVersionNum, minorVersionNum, false); + } + + public GetSpecificPublishedDatasetVersionCommand(DataverseRequest aRequest, Dataset anAffectedDataset, long majorVersionNum, long minorVersionNum, boolean includeDeaccessioned) { super(aRequest, anAffectedDataset); ds = anAffectedDataset; majorVersion = majorVersionNum; minorVersion = minorVersionNum; + this.includeDeaccessioned = includeDeaccessioned; } @Override public DatasetVersion execute(CommandContext ctxt) throws CommandException { - for (DatasetVersion dsv: ds.getVersions()) { - if (dsv.isReleased()) { + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || (includeDeaccessioned && dsv.isDeaccessioned() && ctxt.permissions().requestOn(getRequest(), ds).has(Permission.EditDataset))) { if (dsv.getVersionNumber().equals(majorVersion) && dsv.getMinorVersionNumber().equals(minorVersion)) { return dsv; } @@ -43,5 +49,4 @@ public DatasetVersion execute(CommandContext ctxt) throws CommandException { } return null; } - -} \ No newline at end of file +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java index 51283f29156..b93833ffdf9 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/ListVersionsCommand.java @@ -14,6 +14,7 @@ import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; import edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.CommandExecutionException; import java.util.LinkedList; import java.util.List; @@ -23,23 +24,57 @@ */ // No permission needed to view published dvObjects @RequiredPermissions({}) -public class ListVersionsCommand extends AbstractCommand<List<DatasetVersion>>{ - +public class ListVersionsCommand extends AbstractCommand<List<DatasetVersion>> { + private final Dataset ds; + private final Integer limit; + private final Integer offset; + private final Boolean
deepLookup; - public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) { - super(aRequest, aDataset); - ds = aDataset; - } + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset) { + this(aRequest, aDataset, null, null); + } + + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit) { + this(aRequest, aDataset, offset, limit, false); + } + + public ListVersionsCommand(DataverseRequest aRequest, Dataset aDataset, Integer offset, Integer limit, boolean deepLookup) { + super(aRequest, aDataset); + ds = aDataset; + this.offset = offset; + this.limit = limit; + this.deepLookup = deepLookup; + } - @Override - public List<DatasetVersion> execute(CommandContext ctxt) throws CommandException { - List<DatasetVersion> outputList = new LinkedList<>(); - for ( DatasetVersion dsv : ds.getVersions() ) { - if (dsv.isReleased() || ctxt.permissions().request( getRequest() ).on(ds).has(Permission.EditDataset)) { - outputList.add(dsv); + @Override + public List<DatasetVersion> execute(CommandContext ctxt) throws CommandException { + + boolean includeUnpublished = ctxt.permissions().request(getRequest()).on(ds).has(Permission.EditDataset); + + if (offset == null && limit == null) { + + List<DatasetVersion> outputList = new LinkedList<>(); + for (DatasetVersion dsv : ds.getVersions()) { + if (dsv.isReleased() || includeUnpublished) { + if (deepLookup) { + // @todo: when "deep"/extended lookup is requested, and + // we call .findDeep() to look up each version again, + // there is probably a more economical way to obtain the + // numeric ids of the versions, by a direct single query, + // rather than go through ds.getVersions() like we are now. + dsv = ctxt.datasetVersion().findDeep(dsv.getId()); + if (dsv == null) { + throw new CommandExecutionException("Failed to look up full list of dataset versions", this); + } + } + outputList.add(dsv); + } } - } - return outputList; - } + return outputList; + } else { + // Only a partial list (one "page"-worth) of versions is being requested + return ctxt.datasetVersion().findVersions(ds.getId(), offset, limit, includeUnpublished); + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java index add7b825659..9ef9fed4b1b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseThemeCommand.java @@ -1,6 +1,7 @@ package edu.harvard.iq.dataverse.engine.command.impl; import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.ThemeWidgetFragment; import edu.harvard.iq.dataverse.authorization.Permission; import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; @@ -22,7 +23,6 @@ public class UpdateDataverseThemeCommand extends AbstractCommand<Dataverse> { private final Dataverse editedDv; private final File uploadedFile; - private final Path logoPath = Paths.get("../docroot/logos"); private String locate; public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, DataverseRequest aRequest, String location) { @@ -44,7 +44,7 @@ public UpdateDataverseThemeCommand(Dataverse editedDv, File uploadedFile, Datave public Dataverse execute(CommandContext ctxt) throws CommandException { // Get current dataverse, so we can delete current logo file if necessary Dataverse currentDv =
ctxt.dataverses().find(editedDv.getId()); - File logoFileDir = new File(logoPath.toFile(), editedDv.getId().toString()); + File logoFileDir = ThemeWidgetFragment.getLogoDir(editedDv.getId().toString()).toFile(); File currentFile=null; if (locate.equals("FOOTER")){ diff --git a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java index de4317464e6..cdde9fbe0e8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/externaltools/ExternalToolHandler.java @@ -34,6 +34,8 @@ import org.apache.commons.codec.binary.StringUtils; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST; + /** * Handles an operation on a specific file. Requires a file id in order to be * instantiated. Applies logic based on an {@link ExternalTool} specification, @@ -110,7 +112,7 @@ public String handleRequest(boolean preview) { switch (externalTool.getScope()) { case DATASET: callback=SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/datasets/" - + dataset.getId() + "/versions/:latest/toolparams/" + externalTool.getId(); + + dataset.getId() + "/versions/" + DS_VERSION_LATEST + "/toolparams/" + externalTool.getId(); break; case FILE: callback= SystemConfig.getDataverseSiteUrlStatic() + "/api/v1/files/" diff --git a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java index d2613422be9..d6943ec3511 100644 --- a/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/globus/GlobusServiceBean.java @@ -596,13 +596,14 @@ public void globusUpload(String jsonData, ApiToken token, Dataset dataset, Strin Thread.sleep(5000); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + logger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: I think an (parsing) exception should stop the process, shouldn't it? } - logger.info("json: " + JsonUtil.prettyPrint(jsonObject)); + logger.log(Level.INFO, "json: {0}", JsonUtil.prettyPrint(jsonObject)); String taskIdentifier = jsonObject.getString("taskIdentifier"); @@ -808,11 +809,12 @@ private String addFiles(String curlCommand, Logger globusLogger) { sb.append(line); globusLogger.info(" API Output : " + sb.toString()); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(sb.toString())) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(sb.toString()); } catch (Exception jpe) { jpe.printStackTrace(); globusLogger.log(Level.SEVERE, "Error parsing dataset json."); + // TODO: a parsing exception should cause the process to stop. 
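A hedged sketch of exercising the paginated ListVersionsCommand above through its REST entry point. The offset, limit, and includeFiles query parameter names mirror the command's arguments but are assumptions here, since the resource method signature is not part of this excerpt; serverUrl, apiToken, and the dataset id are placeholders:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ListVersionsPageSketch {
    public static void main(String[] args) throws Exception {
        String serverUrl = "https://dataverse.example.edu";              // placeholder
        String apiToken = "00000000-0000-0000-0000-000000000000";        // placeholder

        // One "page"-worth of versions, mirroring ListVersionsCommand(req, ds, offset, limit);
        // includeFiles=false corresponds to skipping the deepLookup/findDeep path.
        String url = serverUrl + "/api/datasets/42/versions?offset=0&limit=10&includeFiles=false";

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create(url))
                .header("X-Dataverse-key", apiToken)
                .GET()
                .build();
        System.out.println(HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString())
                .body());
    }
}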
} status = jsonObject.getString("status"); @@ -853,11 +855,12 @@ public void globusDownload(String jsonData, Dataset dataset, User authUser) thro globusLogger.info("Starting an globusDownload "); JsonObject jsonObject = null; - try (StringReader rdr = new StringReader(jsonData)) { - jsonObject = Json.createReader(rdr).readObject(); + try { + jsonObject = JsonUtil.getJsonObject(jsonData); } catch (Exception jpe) { jpe.printStackTrace(); - globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}"); + globusLogger.log(Level.SEVERE, "Error parsing dataset json. Json: {0}", jsonData); + // TODO: stop the process after this parsing exception. } String taskIdentifier = jsonObject.getString("taskIdentifier"); diff --git a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java index 0925c164bf4..0fb7e9f1e6c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/makedatacount/DatasetMetricsServiceBean.java @@ -3,7 +3,8 @@ import edu.harvard.iq.dataverse.Dataset; import edu.harvard.iq.dataverse.DatasetServiceBean; -import java.io.StringReader; +import edu.harvard.iq.dataverse.util.json.JsonUtil; + import java.math.BigDecimal; import java.util.ArrayList; import java.util.List; @@ -14,10 +15,8 @@ import jakarta.ejb.EJBException; import jakarta.ejb.Stateless; import jakarta.inject.Named; -import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; -import jakarta.json.JsonReader; import jakarta.json.JsonValue; import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; @@ -125,9 +124,7 @@ public List<DatasetMetrics> parseSushiReport(JsonObject report, Dataset dataset) List<DatasetMetrics> datasetMetricsDataset = new ArrayList<>(); String globalId = null; Dataset ds = null; - StringReader rdr = new StringReader(reportDataset.toString()); - JsonReader jrdr = Json.createReader(rdr); - JsonObject obj = jrdr.readObject(); + JsonObject obj = JsonUtil.getJsonObject(reportDataset.toString()); String jsonGlobalId = ""; String globalIdType = ""; if (obj.containsKey("dataset-id")) { diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java new file mode 100644 index 00000000000..a2c3f53d59d --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/settings/ConfigCheckService.java @@ -0,0 +1,81 @@ +package edu.harvard.iq.dataverse.settings; + +import edu.harvard.iq.dataverse.util.FileUtil; + +import jakarta.annotation.PostConstruct; +import jakarta.ejb.DependsOn; +import jakarta.ejb.Singleton; +import jakarta.ejb.Startup; +import java.io.IOException; +import java.nio.file.FileSystemException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + +@Startup +@Singleton +@DependsOn("StartupFlywayMigrator") +public class ConfigCheckService { + + private static final Logger logger = Logger.getLogger(ConfigCheckService.class.getCanonicalName()); + + public static class ConfigurationError extends RuntimeException { + public ConfigurationError(String message) { + super(message); + } + } + + @PostConstruct + public void startup() { + if (!checkSystemDirectories()) { + throw new ConfigurationError("Not all configuration checks passed successfully.
See logs above.");
+ }
+ }
+
+ /**
+ * In this method, we check the existence and write-ability of all important directories we use during
+ * normal operations. It does not include checks for the storage system. If directories are not present,
+ * we try to create them (and fail when not allowed to).
+ *
+ * @return True if all checks successful, false otherwise.
+ */
+ public boolean checkSystemDirectories() {
+ Map<Path, String> paths = Map.of(
+ Path.of(JvmSettings.UPLOADS_DIRECTORY.lookup()), "temporary JSF upload space (see " + JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + ")",
+ Path.of(FileUtil.getFilesTempDirectory()), "temporary processing space (see " + JvmSettings.FILES_DIRECTORY.getScopedKey() + ")",
+ Path.of(JvmSettings.DOCROOT_DIRECTORY.lookup()), "docroot space (see " + JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + ")");
+
+ boolean success = true;
+ for (Path path : paths.keySet()) {
+ // Check if the configured path is absolute - avoid potential problems with relative paths this way
+ if (! path.isAbsolute()) {
+ logger.log(Level.SEVERE, () -> "Configured directory " + path + " for " + paths.get(path) + " is not absolute");
+ success = false;
+ continue;
+ }
+
+ if (! Files.exists(path)) {
+ try {
+ Files.createDirectories(path);
+ } catch (IOException e) {
+ String details;
+ if (e instanceof FileSystemException) {
+ details = ": " + e.getClass();
+ } else {
+ details = "";
+ }
+
+ logger.log(Level.SEVERE, () -> "Could not create directory " + path + " for " + paths.get(path) + details);
+ success = false;
+ }
+ } else if (!Files.isWritable(path)) {
+ logger.log(Level.SEVERE, () -> "Directory " + path + " for " + paths.get(path) + " exists, but is not writeable");
+ success = false;
+ }
+ }
+ return success;
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
index 8a3239a6e0f..cc3272413c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/JvmSettings.java
@@ -47,6 +47,8 @@ public enum JvmSettings {
// FILES SETTINGS
SCOPE_FILES(PREFIX, "files"),
FILES_DIRECTORY(SCOPE_FILES, "directory"),
+ UPLOADS_DIRECTORY(SCOPE_FILES, "uploads"),
+ DOCROOT_DIRECTORY(SCOPE_FILES, "docroot"),
GUESTBOOK_AT_REQUEST(SCOPE_FILES, "guestbook-at-request"),
// SOLR INDEX SETTINGS
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
index 0aa403a5116..6b74810eb53 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/SettingsServiceBean.java
@@ -4,14 +4,12 @@
import edu.harvard.iq.dataverse.actionlogging.ActionLogServiceBean;
import edu.harvard.iq.dataverse.api.ApiBlockingFilter;
import edu.harvard.iq.dataverse.util.StringUtil;
-
+import edu.harvard.iq.dataverse.util.json.JsonUtil;
import jakarta.ejb.EJB;
import jakarta.ejb.Stateless;
import jakarta.inject.Named;
-import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObject;
-import jakarta.json.JsonReader;
import jakarta.json.JsonValue;
import jakarta.persistence.EntityManager;
import jakarta.persistence.PersistenceContext;
@@ -20,7 +18,6 @@
import org.json.JSONException;
import org.json.JSONObject;
-import java.io.StringReader;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@@ -696,8 +693,8 @@ public Long 
getValueForCompoundKeyAsLong(Key key, String param){
try {
return Long.parseLong(val);
} catch (NumberFormatException ex) {
- try ( StringReader rdr = new StringReader(val) ) {
- JsonObject settings = Json.createReader(rdr).readObject();
+ try {
+ JsonObject settings = JsonUtil.getJsonObject(val);
if(settings.containsKey(param)) {
return Long.parseLong(settings.getString(param));
} else if(settings.containsKey("default")) {
@@ -730,8 +727,8 @@ public Boolean getValueForCompoundKeyAsBoolean(Key key, String param) {
return null;
}
- try (StringReader rdr = new StringReader(val)) {
- JsonObject settings = Json.createReader(rdr).readObject();
+ try {
+ JsonObject settings = JsonUtil.getJsonObject(val);
if (settings.containsKey(param)) {
return Boolean.parseBoolean(settings.getString(param));
} else if (settings.containsKey("default")) {
@@ -897,8 +894,7 @@ public Map<String, String> getBaseMetadataLanguageMap(Map<String, String> languag
if(mlString.isEmpty()) {
mlString="[]";
}
- JsonReader jsonReader = Json.createReader(new StringReader(mlString));
- JsonArray languages = jsonReader.readArray();
+ JsonArray languages = JsonUtil.getJsonArray(mlString);
for(JsonValue jv: languages) {
JsonObject lang = (JsonObject) jv;
languageMap.put(lang.getString("locale"), lang.getString("title"));
diff --git a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
index e32b811ee2c..86ae697f771 100644
--- a/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtil.java
@@ -3,6 +3,8 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DvObjectContainer;
+import edu.harvard.iq.dataverse.settings.ConfigCheckService;
+import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.SystemConfig;
import edu.harvard.iq.dataverse.util.xml.XmlValidator;
import java.io.File;
@@ -210,16 +212,17 @@ public static boolean stageFileExists() {
}
return false;
}
-
+
+ /**
+ * Look up the location where the sitemap is generated.
+ *
+ * Note: the location is checked to be configured, to exist, and to be writeable in
+ * {@link ConfigCheckService#checkSystemDirectories()}
+ *
+ * @return Sitemap storage location ([docroot]/sitemap)
+ */
private static String getSitemapPathString() {
- String sitemapPathString = "/tmp";
- // i.e. /usr/local/glassfish4/glassfish/domains/domain1
- String domainRoot = System.getProperty("com.sun.aas.instanceRoot");
- if (domainRoot != null) {
- // Note that we write to a directory called "sitemap" but we serve just "/sitemap.xml" using PrettyFaces. 
- sitemapPathString = domainRoot + File.separator + "docroot" + File.separator + "sitemap";
- }
- return sitemapPathString;
+ return JvmSettings.DOCROOT_DIRECTORY.lookup() + File.separator + "sitemap";
}
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 75f265494b3..1ad389fb0e2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -35,12 +35,15 @@
import edu.harvard.iq.dataverse.dataset.DatasetThumbnail;
import edu.harvard.iq.dataverse.dataset.DatasetUtil;
import edu.harvard.iq.dataverse.datasetutility.FileExceedsMaxSizeException;
+
+import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT;
import static edu.harvard.iq.dataverse.datasetutility.FileSizeChecker.bytesToHumanReadable;
import edu.harvard.iq.dataverse.ingest.IngestReport;
import edu.harvard.iq.dataverse.ingest.IngestServiceBean;
import edu.harvard.iq.dataverse.ingest.IngestServiceShapefileHelper;
import edu.harvard.iq.dataverse.ingest.IngestableDataChecker;
import edu.harvard.iq.dataverse.license.License;
+import edu.harvard.iq.dataverse.settings.ConfigCheckService;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.util.file.BagItFileHandler;
import edu.harvard.iq.dataverse.util.file.CreateDataFileResult;
@@ -1003,25 +1006,17 @@ public static boolean canIngestAsTabular(String mimeType) {
}
}
+ /**
+ * Return the location where data should be stored temporarily after uploading (UI or API)
+ * for local processing (ingest, unzip, ...) and transfer to final destination (see storage subsystem).
+ *
+ * This location is checked to be configured, to exist, and to be writeable via
+ * {@link ConfigCheckService#checkSystemDirectories()}.
+ *
+ * @return String with a path to the temporary location. Will not be null (former versions returned null to indicate failure).
+ */
public static String getFilesTempDirectory() {
-
- String filesRootDirectory = JvmSettings.FILES_DIRECTORY.lookup();
- String filesTempDirectory = filesRootDirectory + "/temp";
-
- if (!Files.exists(Paths.get(filesTempDirectory))) {
- /* Note that "createDirectories()" must be used - not
- * "createDirectory()", to make sure all the parent
- * directories that may not yet exist are created as well. 
- */ - try { - Files.createDirectories(Paths.get(filesTempDirectory)); - } catch (IOException ex) { - logger.severe("Failed to create filesTempDirectory: " + filesTempDirectory ); - return null; - } - } - - return filesTempDirectory; + return JvmSettings.FILES_DIRECTORY.lookup() + File.separator + "temp"; } public static void generateS3PackageStorageIdentifier(DataFile dataFile) { @@ -1745,7 +1740,7 @@ private static String getFileAccessUrl(FileMetadata fileMetadata, String apiLoca private static String getFolderAccessUrl(DatasetVersion version, String currentFolder, String subFolder, String apiLocation, boolean originals) { String datasetId = version.getDataset().getId().toString(); String versionTag = version.getFriendlyVersionNumber(); - versionTag = versionTag.replace("DRAFT", ":draft"); + versionTag = versionTag.replace("DRAFT", DS_VERSION_DRAFT); if (!"".equals(currentFolder)) { subFolder = currentFolder + "/" + subFolder; } diff --git a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java index 2c9b7167059..1826689b892 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/SignpostingResources.java @@ -71,14 +71,14 @@ public String getLinks() { String describedby = "<" + ds.getGlobalId().asURL().toString() + ">;rel=\"describedby\"" + ";type=\"" + "application/vnd.citationstyles.csl+json\""; describedby += ",<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" - + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/json+ld\""; + + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() + ">;rel=\"describedby\"" + ";type=\"application/ld+json\""; valueList.add(describedby); String type = ";rel=\"type\""; type = ";rel=\"type\",<" + defaultFileTypeValue + ">;rel=\"type\""; valueList.add(type); - String licenseString = DatasetUtil.getLicenseURI(workingDatasetVersion) + ";rel=\"license\""; + String licenseString = "<" + DatasetUtil.getLicenseURI(workingDatasetVersion) + ">;rel=\"license\""; valueList.add(licenseString); String linkset = "<" + systemConfig.getDataverseSiteUrl() + "/api/datasets/:persistentId/versions/" @@ -116,7 +116,7 @@ public JsonArrayBuilder getJsonLinkset() { systemConfig.getDataverseSiteUrl() + "/api/datasets/export?exporter=schema.org&persistentId=" + ds.getProtocol() + ":" + ds.getAuthority() + "/" + ds.getIdentifier() ).add( "type", - "application/json+ld" + "application/ld+json" ) ); JsonArrayBuilder linksetJsonObj = Json.createArrayBuilder(); diff --git a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java index 4ae76a7b8db..c864823176e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/URLTokenUtil.java @@ -14,6 +14,8 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.authorization.users.ApiToken; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; + public class URLTokenUtil { protected static final Logger logger = Logger.getLogger(URLTokenUtil.class.getCanonicalName()); @@ -177,8 +179,7 @@ private String getTokenValue(String value) { } } if (("DRAFT").equals(versionString)) { - versionString = ":draft"; // send the token needed in api calls that can be substituted for a numeric 
- // version.
}
return versionString;
case FILE_METADATA_ID:
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
index a7aa36f179e..65fe749e554 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java
@@ -41,7 +41,6 @@
import jakarta.json.Json;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonValue;
import java.util.function.BiConsumer;
import java.util.function.BinaryOperator;
@@ -352,11 +351,14 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) {
.add("mime",ds.getMime()));
}
- public static JsonObjectBuilder json(DatasetVersion dsv) {
- return json(dsv, null);
+ public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) {
+ return json(dsv, null, includeFiles);
}
- public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList) {
+ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList, boolean includeFiles) {
+ /* return json(dsv, null, includeFiles, null);
+ }
+ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymizedFieldTypeNamesList, boolean includeFiles, Long numberOfFiles) {*/
Dataset dataset = dsv.getDataset();
JsonObjectBuilder bld = jsonObjectBuilder()
.add("id", dsv.getId()).add("datasetId", dataset.getId())
@@ -372,6 +374,8 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymized
.add("alternativePersistentId", dataset.getAlternativePersistentIdentifier())
.add("publicationDate", dataset.getPublicationDateFormattedYYYYMMDD())
.add("citationDate", dataset.getCitationDateFormattedYYYYMMDD());
+ //.add("numberOfFiles", numberOfFiles);
+
License license = DatasetUtil.getLicense(dsv);
if (license != null) {
bld.add("license", jsonLicense(dsv));
@@ -399,7 +403,9 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List<String> anonymized
jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList)
: jsonByBlocks(dsv.getDatasetFields())
);
- bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas()));
+ if (includeFiles) {
+ bld.add("files", jsonFileMetadatas(dsv.getFileMetadatas()));
+ }
return bld;
}
@@ -431,8 +437,8 @@ public static JsonObjectBuilder jsonDataFileList(List<DataFile> dataFiles){
* to the regular `json` method for DatasetVersion? Will anything break?
* Unit tests for that method could not be found. 
*/
- public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) {
- JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv);
+ public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv, boolean includeFiles) {
+ JsonObjectBuilder dsvWithCitation = JsonPrinter.json(dsv, includeFiles);
dsvWithCitation.add("citation", dsv.getCitation());
return dsvWithCitation;
}
@@ -451,7 +457,7 @@ public static JsonObjectBuilder jsonWithCitation(DatasetVersion dsv) {
*/
public static JsonObjectBuilder jsonAsDatasetDto(DatasetVersion dsv) {
JsonObjectBuilder datasetDtoAsJson = JsonPrinter.json(dsv.getDataset());
- datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv));
+ datasetDtoAsJson.add("datasetVersion", jsonWithCitation(dsv, true));
return datasetDtoAsJson;
}
@@ -649,6 +655,7 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
.add("pidURL", pidURL)
.add("filename", fileName)
.add("contentType", df.getContentType())
+ .add("friendlyType", df.getFriendlyType())
.add("filesize", df.getFilesize())
.add("description", fileMetadata.getDescription())
.add("categories", getFileCategories(fileMetadata))
@@ -672,9 +679,14 @@ public static JsonObjectBuilder json(DataFile df, FileMetadata fileMetadata, boo
//---------------------------------------------
.add("md5", getMd5IfItExists(df.getChecksumType(), df.getChecksumValue()))
.add("checksum", getChecksumTypeAndValue(df.getChecksumType(), df.getChecksumValue()))
+ .add("tabularData", df.isTabularData())
.add("tabularTags", getTabularFileTags(df))
.add("creationDate", df.getCreateDateFormattedYYYYMMDD())
.add("publicationDate", df.getPublicationDateFormattedYYYYMMDD());
+ Dataset dfOwner = df.getOwner();
+ if (dfOwner != null) {
+ builder.add("fileAccessRequest", dfOwner.isFileAccessRequest());
+ }
/*
* The restricted state was not included prior to #9175 so to avoid backward
* incompatibility, it is now only added when generating json for the
@@ -1095,6 +1107,30 @@ public Set<Characteristics> characteristics()
};
}
+ public static JsonObjectBuilder json(Map<String, Long> map) {
+ JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+ for (Map.Entry<String, Long> mapEntry : map.entrySet()) {
+ jsonObjectBuilder.add(mapEntry.getKey(), mapEntry.getValue());
+ }
+ return jsonObjectBuilder;
+ }
+
+ public static JsonObjectBuilder jsonFileCountPerAccessStatusMap(Map<FileSearchCriteria.FileAccessStatus, Long> map) {
+ JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+ for (Map.Entry<FileSearchCriteria.FileAccessStatus, Long> mapEntry : map.entrySet()) {
+ jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue());
+ }
+ return jsonObjectBuilder;
+ }
+
+ public static JsonObjectBuilder jsonFileCountPerTabularTagNameMap(Map<DataFileTag.TagType, Long> map) {
+ JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder();
+ for (Map.Entry<DataFileTag.TagType, Long> mapEntry : map.entrySet()) {
+ jsonObjectBuilder.add(mapEntry.getKey().toString(), mapEntry.getValue());
+ }
+ return jsonObjectBuilder;
+ }
+
public static Collector, JsonArrayBuilder> toJsonArray() {
return new Collector, JsonArrayBuilder>() {
diff --git a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
index 456b829ba61..b104f113db2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/workflows/WorkflowUtil.java
@@ -3,7 +3,6 @@
import edu.harvard.iq.dataverse.DatasetVersion;
import edu.harvard.iq.dataverse.api.Util;
-import java.io.StringReader;
import java.util.List;
import java.util.logging.Level;
import 
java.util.logging.Logger; @@ -11,7 +10,7 @@ import jakarta.json.Json; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; - +import edu.harvard.iq.dataverse.util.json.JsonUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.workflow.step.Failure; import edu.harvard.iq.dataverse.workflow.step.Success; @@ -42,8 +41,8 @@ public static JsonArrayBuilder getAllWorkflowComments(DatasetVersion datasetVers } public static WorkflowStepResult parseResponse(String externalData) { - try (StringReader reader = new StringReader(externalData)) { - JsonObject response = Json.createReader(reader).readObject(); + try { + JsonObject response = JsonUtil.getJsonObject(externalData); String status = null; //Lower case is documented, upper case is deprecated if(response.containsKey("status")) { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index ac725caf1b2..79887f7e76c 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -2498,6 +2498,7 @@ dataset.registered=DatasetRegistered dataset.registered.msg=Your dataset is now registered. dataset.notlinked=DatasetNotLinked dataset.notlinked.msg=There was a problem linking this dataset to yours: +dataset.linking.popop.already.linked.note=Note: This dataset is already linked to the following dataverse(s): datasetversion.archive.success=Archival copy of Version successfully submitted datasetversion.archive.failure=Error in submitting an archival copy datasetversion.update.failure=Dataset Version Update failed. Changes are still in the DRAFT version. @@ -2620,6 +2621,7 @@ admin.api.deleteUser.success=Authenticated User {0} deleted. #Files.java files.api.metadata.update.duplicateFile=Filename already exists at {0} files.api.no.draft=No draft available for this file +files.api.only.tabular.supported=This operation is only available for tabular files. #Datasets.java datasets.api.updatePIDMetadata.failure.dataset.must.be.released=Modify Registration Metadata must be run on a published dataset. @@ -2645,7 +2647,10 @@ datasets.api.privateurl.anonymized.error.released=Can't create a URL for anonymi datasets.api.creationdate=Date Created datasets.api.modificationdate=Last Modified Date datasets.api.curationstatus=Curation Status - +datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0} +datasets.api.version.files.invalid.access.status=Invalid access status: {0} +datasets.api.deaccessionDataset.invalid.version.identifier.error=Only {0} or a specific version can be deaccessioned +datasets.api.deaccessionDataset.invalid.forward.url=Invalid deaccession forward URL: {0} #Dataverses.java dataverses.api.update.default.contributor.role.failure.role.not.found=Role {0} not found. 
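[Editor's note on the Bundle.properties keys directly above: a minimal, hypothetical sketch of how one of the new parameterized keys resolves. The BundleUtil call is the same one the integration tests below use for these keys; the "NameXYZ" argument is invented for illustration.]

```java
import edu.harvard.iq.dataverse.util.BundleUtil;

import java.util.List;

public class BundleMessageSketch {
    public static void main(String[] args) {
        // Property: datasets.api.version.files.invalid.order.criteria=Invalid order criteria: {0}
        // {0} is replaced by the first element of the argument list.
        String msg = BundleUtil.getStringFromBundle(
                "datasets.api.version.files.invalid.order.criteria",
                List.of("NameXYZ")); // invented criteria value
        System.out.println(msg); // Invalid order criteria: NameXYZ
    }
}
```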
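[Editor's note: several hunks above (GlobusServiceBean, DatasetMetricsServiceBean, SettingsServiceBean, WorkflowUtil) repeat the same refactor, replacing hand-rolled JsonReader plumbing with the shared JsonUtil helper. A minimal standalone sketch of the before/after pattern; the payload is invented, and only Json.createReader and JsonUtil.getJsonObject are taken from the diff itself.]

```java
import edu.harvard.iq.dataverse.util.json.JsonUtil;

import jakarta.json.Json;
import jakarta.json.JsonObject;

import java.io.StringReader;

public class JsonParsingPatternSketch {
    public static void main(String[] args) {
        String jsonData = "{\"taskIdentifier\":\"task-123\"}"; // invented payload

        // Before: each call site builds and closes its own reader.
        JsonObject before;
        try (StringReader rdr = new StringReader(jsonData)) {
            before = Json.createReader(rdr).readObject();
        }

        // After: one shared helper hides the reader handling.
        JsonObject after = JsonUtil.getJsonObject(jsonData);

        System.out.println(before.getString("taskIdentifier")); // task-123
        System.out.println(after.getString("taskIdentifier"));  // task-123
    }
}
```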
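[Editor's note on the new UPLOADS_DIRECTORY and DOCROOT_DIRECTORY settings and the ConfigCheckService above: a hedged sketch of how a lookup is expected to resolve via MicroProfile Config. The lookup() and getScopedKey() methods appear in the hunks above; the concrete paths and the use of system properties as the config source are illustrative assumptions.]

```java
import edu.harvard.iq.dataverse.settings.JvmSettings;

public class DirectorySettingsSketch {
    public static void main(String[] args) {
        // Illustrative values; real deployments set these via JVM options,
        // environment variables, or microprofile-config.properties.
        System.setProperty("dataverse.files.uploads", "/data/uploads");
        System.setProperty("dataverse.files.docroot", "/data/docroot");

        // ConfigCheckService resolves each setting the same way before
        // checking that the directory is absolute, present, and writeable:
        String uploads = JvmSettings.UPLOADS_DIRECTORY.lookup();
        String docroot = JvmSettings.DOCROOT_DIRECTORY.lookup();

        // getScopedKey() yields the property name embedded in the log messages.
        System.out.println(JvmSettings.UPLOADS_DIRECTORY.getScopedKey() + " -> " + uploads); // dataverse.files.uploads -> /data/uploads
        System.out.println(JvmSettings.DOCROOT_DIRECTORY.getScopedKey() + " -> " + docroot); // dataverse.files.docroot -> /data/docroot
    }
}
```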
diff --git a/src/main/resources/META-INF/microprofile-config.properties b/src/main/resources/META-INF/microprofile-config.properties
index 371f0eed5b1..504b5e46735 100644
--- a/src/main/resources/META-INF/microprofile-config.properties
+++ b/src/main/resources/META-INF/microprofile-config.properties
@@ -9,10 +9,15 @@ dataverse.build=
%ct.dataverse.siteUrl=http://${dataverse.fqdn}:8080
# FILES
-dataverse.files.directory=/tmp/dataverse
-# The variables are replaced with the environment variables from our base image, but still easy to override
-%ct.dataverse.files.directory=${STORAGE_DIR}
-%ct.dataverse.files.uploads=${STORAGE_DIR}/uploads
+# NOTE: The following uses STORAGE_DIR for both containers and classic installations. When defaulting to
+# "com.sun.aas.instanceRoot" if not present, it equals the hardcoded default "." in glassfish-web.xml
+# (which is relative to the domain root folder).
+# Also, be aware that this props file cannot provide any value for lookups in glassfish-web.xml during servlet
+# initialization, as this file will not have been read yet! The names and their values are in sync here and over
+# there to ensure the config checker is able to check for the directories (exist + writeable).
+dataverse.files.directory=${STORAGE_DIR:/tmp/dataverse}
+dataverse.files.uploads=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/uploads
+dataverse.files.docroot=${STORAGE_DIR:${com.sun.aas.instanceRoot}}/docroot
# SEARCH INDEX
dataverse.solr.host=localhost
diff --git a/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql
new file mode 100644
index 00000000000..536798015ba
--- /dev/null
+++ b/src/main/resources/db/migration/V6.0.0.2__9763-embargocitationdate.sql
@@ -0,0 +1,14 @@
+-- An aggregated timestamp which is the latest of the availability dates of any embargoed files in the first published version, if present
+ALTER TABLE dataset ADD COLUMN IF NOT EXISTS embargoCitationDate timestamp without time zone;
+-- ... and an update query that will populate this column for all the published datasets with embargoed files in the first released version:
+UPDATE dataset SET embargocitationdate=o.embargocitationdate
+FROM (SELECT d.id, MAX(e.dateavailable) AS embargocitationdate
+FROM embargo e, dataset d, datafile f, datasetversion v, filemetadata m
+WHERE v.dataset_id = d.id
+AND v.versionstate = 'RELEASED'
+AND v.versionnumber = 1
+AND v.minorversionnumber = 0
+AND f.embargo_id = e.id
+AND m.datasetversion_id = v.id
+AND m.datafile_id = f.id GROUP BY d.id) o WHERE o.id = dataset.id;
+-- (the query follows the logic that used to be in the method Dataset.getCitationDate() that calculated this adjusted date in real time).
diff --git a/src/main/webapp/WEB-INF/glassfish-web.xml b/src/main/webapp/WEB-INF/glassfish-web.xml
index e56d7013abf..015a309fd6b 100644
@@ -10,13 +10,18 @@
[The XML content of this hunk was stripped during extraction; only the removed/added line markers survived, so the glassfish-web.xml changes cannot be reconstructed here.]
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 1b1d77d3057..2f76197e508 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -1622,6 +1622,12 @@
[The XHTML markup of this dataset.xhtml hunk and of the changes that follow it was stripped during extraction; aside from the +/- line markers, the only recoverable fragment is the bundle key #{bundle['file.downloadDialog.tip']}, so these changes cannot be reconstructed here.]
\ No newline at end of file
diff --git a/src/main/webapp/guestbook-terms-popup-fragment.xhtml b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
index 69cc9fae55c..34df0c79390 100644
--- a/src/main/webapp/guestbook-terms-popup-fragment.xhtml
+++ b/src/main/webapp/guestbook-terms-popup-fragment.xhtml
@@ -7,7 +7,8 @@ xmlns:o="http://omnifaces.org/ui"
xmlns:jsf="http://xmlns.jcp.org/jsf"
xmlns:iqbs="http://xmlns.jcp.org/jsf/composite/iqbs">
[The changed markup of this hunk and of the following @@ -321,4 +322,5 @@ hunk was stripped during extraction and cannot be reconstructed here.]
+ diff --git a/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java new file mode 100644 index 00000000000..999fadaae06 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DatasetFieldDefaultValueTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DatasetFieldDefaultValueTest { + private DatasetFieldDefaultValue dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DatasetFieldDefaultValue(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetField datasetField = new DatasetField(); + + assertFalse(this.dataverseContact.equals(datasetField)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DatasetFieldDefaultValue dataverseContact1 = new DatasetFieldDefaultValue(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java new file mode 100644 index 00000000000..2abb10a485d --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseContactTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseContactTest { + private DataverseContact dataverseContact; + + @BeforeEach + public void before() { + this.dataverseContact = new DataverseContact(); + this.dataverseContact.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseContact.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseContact.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(1L); + + assertTrue(this.dataverseContact.equals(dataverseContact1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseContact dataverseContact1 = new DataverseContact(); + dataverseContact1.setId(2L); + + assertFalse(this.dataverseContact.equals(dataverseContact1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java new file mode 100644 index 00000000000..7ae50439c10 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFacetTest.java @@ -0,0 +1,47 @@ +package 
edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFacetTest { + private DataverseFacet dataverseFacet; + + @BeforeEach + public void before() { + this.dataverseFacet = new DataverseFacet(); + this.dataverseFacet.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFacet.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFacet.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(1L); + + assertTrue(this.dataverseFacet.equals(dataverseFacet1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFacet dataverseFacet1 = new DataverseFacet(); + dataverseFacet1.setId(2L); + + assertFalse(this.dataverseFacet.equals(dataverseFacet1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java new file mode 100644 index 00000000000..b024dc3bfd3 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseFeaturedDataverseTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseFeaturedDataverseTest { + private DataverseFeaturedDataverse dataverseFeaturedDataverse; + + @BeforeEach + public void before() { + this.dataverseFeaturedDataverse = new DataverseFeaturedDataverse(); + this.dataverseFeaturedDataverse.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseFeaturedDataverse.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseFeaturedDataverse.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(1L); + + assertTrue(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseFeaturedDataverse dataverseFeaturedDataverse1 = new DataverseFeaturedDataverse(); + dataverseFeaturedDataverse1.setId(2L); + + assertFalse(this.dataverseFeaturedDataverse.equals(dataverseFeaturedDataverse1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java new file mode 100644 index 00000000000..e6721e34c6f --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/DataverseThemeTest.java @@ -0,0 +1,47 @@ +package edu.harvard.iq.dataverse; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertFalse; + +import org.junit.jupiter.api.BeforeEach; + +public class DataverseThemeTest { + private DataverseTheme dataverseTheme; + + @BeforeEach + public void before() { + this.dataverseTheme = new DataverseTheme(); + this.dataverseTheme.setId(1L); + } + + @Test + public void testEqualsWithNull() { + assertFalse(this.dataverseTheme.equals(null)); + } + + @Test + public void testEqualsWithDifferentClass() { + DatasetFieldType datasetFieldType = new DatasetFieldType(); + + assertFalse(this.dataverseTheme.equals(datasetFieldType)); + } + + @Test + public void testEqualsWithSameClassSameId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(1L); + + assertTrue(this.dataverseTheme.equals(dataverseTheme1)); + } + + @Test + public void testEqualsWithSameClassDifferentId() { + DataverseTheme dataverseTheme1 = new DataverseTheme(); + dataverseTheme1.setId(2L); + + assertFalse(this.dataverseTheme.equals(dataverseTheme1)); + } +} \ No newline at end of file diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index b1beddd893f..42e21e53101 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -26,11 +26,9 @@ import static jakarta.ws.rs.core.Response.Status.*; import static org.hamcrest.MatcherAssert.*; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; +import static org.junit.jupiter.api.Assertions.*; /** * @@ -632,33 +630,44 @@ public void testZipUploadAndDownload() throws IOException { } @Test - public void testGetUserPermissionsOnFile() { - Response createUser = UtilIT.createRandomUser(); - createUser.then().assertThat().statusCode(OK.getStatusCode()); - String apiToken = UtilIT.getApiTokenFromResponse(createUser); + public void testGetUserFileAccessRequested() { + // Create new user + Response createUserResponse = UtilIT.createRandomUser(); + createUserResponse.then().assertThat().statusCode(OK.getStatusCode()); + String newUserApiToken = UtilIT.getApiTokenFromResponse(createUserResponse); - Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); - createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + String dataFileId = Integer.toString(tabFile3IdRestricted); - Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); - createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); - int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + // Call with new user and unrequested access file + Response getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Upload test file - String pathToTestFile = "src/test/resources/images/coffeeshop.png"; - Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); - uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean userFileAccessRequested = 
JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertFalse(userFileAccessRequested); + + // Request file access for the new user + Response requestFileAccessResponse = UtilIT.requestFileAccess(dataFileId, newUserApiToken); + requestFileAccessResponse.then().assertThat().statusCode(OK.getStatusCode()); - // Assert user permissions on file - int testFileId = JsonPath.from(uploadResponse.body().asString()).getInt("data.files[0].dataFile.id"); - Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(testFileId), apiToken); + // Call with new user and requested access file + getUserFileAccessRequestedResponse = UtilIT.getUserFileAccessRequested(dataFileId, newUserApiToken); + getUserFileAccessRequestedResponse.then().assertThat().statusCode(OK.getStatusCode()); + userFileAccessRequested = JsonPath.from(getUserFileAccessRequestedResponse.body().asString()).getBoolean("data"); + assertTrue(userFileAccessRequested); + } + + @Test + public void testGetUserPermissionsOnFile() { + // Call with valid file id + Response getUserPermissionsOnFileResponse = UtilIT.getUserPermissionsOnFile(Integer.toString(basicFileId), apiToken); getUserPermissionsOnFileResponse.then().assertThat().statusCode(OK.getStatusCode()); boolean canDownloadFile = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canDownloadFile"); assertTrue(canDownloadFile); boolean canEditOwnerDataset = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canEditOwnerDataset"); assertTrue(canEditOwnerDataset); + boolean canManageFilePermissions = JsonPath.from(getUserPermissionsOnFileResponse.body().asString()).getBoolean("data.canManageFilePermissions"); + assertTrue(canManageFilePermissions); // Call with invalid file id Response getUserPermissionsOnFileInvalidIdResponse = UtilIT.getUserPermissionsOnFile("testInvalidId", apiToken); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 3b6d4d1ecdf..56bf53c1c99 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -1,24 +1,29 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.FileSearchCriteria; import io.restassured.RestAssured; +import static edu.harvard.iq.dataverse.DatasetVersion.ARCHIVE_NOTE_MAX_LENGTH; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.RestAssured.given; import io.restassured.path.json.JsonPath; import io.restassured.http.ContentType; import io.restassured.response.Response; +import java.time.LocalDate; +import java.time.format.DateTimeFormatter; +import java.util.*; import java.util.logging.Logger; +import org.apache.commons.lang3.RandomStringUtils; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.skyscreamer.jsonassert.JSONAssert; import org.junit.jupiter.api.Disabled; -import java.util.List; -import java.util.Map; import jakarta.json.JsonObject; import static jakarta.ws.rs.core.Response.Status.CREATED; @@ -39,8 +44,6 @@ import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; -import java.util.UUID; - import 
org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -67,8 +70,7 @@
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
-import java.util.ArrayList;
-import java.util.HashMap;
+
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonObjectBuilder;
@@ -77,6 +79,7 @@
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
+import static java.lang.Thread.sleep;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.hamcrest.CoreMatchers;
@@ -117,7 +120,9 @@ public static void setUpClass() {
Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
removeAnonymizedFieldTypeNames.then().assertThat()
.statusCode(200);
-
+
+ UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+
/* With Dual mode, we can no longer mess with upload methods since native is now required for anything to work
Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl);
@@ -144,7 +149,9 @@ public static void afterClass() {
Response removeAnonymizedFieldTypeNames = UtilIT.deleteSetting(SettingsServiceBean.Key.AnonymizedFieldTypeNames);
removeAnonymizedFieldTypeNames.then().assertThat()
.statusCode(200);
-
+
+ UtilIT.deleteSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths);
+
/* See above
Response removeDcmUrl = UtilIT.deleteSetting(SettingsServiceBean.Key.DataCaptureModuleUrl);
removeDcmUrl.then().assertThat()
@@ -498,7 +505,7 @@ public void testCreatePublishDestroyDataset() {
assertTrue(datasetContactFromExport.toString().contains("finch@mailinator.com"));
assertTrue(firstValue.toString().contains("finch@mailinator.com"));
- Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, ":latest-published", apiToken);
+ Response getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken);
getDatasetVersion.prettyPrint();
getDatasetVersion.then().assertThat()
.body("data.datasetId", equalTo(datasetId))
@@ -542,6 +549,18 @@ public void testCreatePublishDestroyDataset() {
}
assertEquals(datasetPersistentId, XmlPath.from(exportDatasetAsDdi.body().asString()).getString("codeBook.docDscr.citation.titlStmt.IDNo"));
+ // Test includeDeaccessioned option
+ Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken);
+ deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode());
+
+ // includeDeaccessioned false
+ getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken, false, false);
+ getDatasetVersion.then().assertThat().statusCode(NOT_FOUND.getStatusCode());
+
+ // includeDeaccessioned true
+ getDatasetVersion = UtilIT.getDatasetVersion(datasetPersistentId, DS_VERSION_LATEST_PUBLISHED, apiToken, false, true);
+ getDatasetVersion.then().assertThat().statusCode(OK.getStatusCode());
+
Response deleteDatasetResponse = UtilIT.destroyDataset(datasetId, apiToken);
deleteDatasetResponse.prettyPrint();
assertEquals(200, deleteDatasetResponse.getStatusCode());
@@ -556,6 +575,103 @@
}
+ /**
+ * The APIs (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid})
+ * are already called from other RestAssured tests, in this class and also in FilesIT. 
+ * But this test is dedicated to this api specifically, and focuses on the + * functionality added to it in 6.1. + */ + @Test + public void testDatasetVersionsAPI() { + // Create user + String apiToken = UtilIT.createRandomUserGetToken(); + + // Create user with no permission + String apiTokenNoPerms = UtilIT.createRandomUserGetToken(); + + // Create Collection + String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken); + + // Create Dataset + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + // Upload file + String pathToFile = "src/main/webapp/resources/images/dataverseproject.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile, apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Check that the file we just uploaded is shown by the versions api: + Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files.size()", equalTo(1)) + .statusCode(OK.getStatusCode()); + + // Now check that the file is NOT shown, when we ask the versions api to + // skip files: + boolean skipFiles = true; + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, skipFiles, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .body("data.files", equalTo(null)) + .statusCode(OK.getStatusCode()); + + // Publish collection and dataset + UtilIT.publishDataverseViaNativeApi(collectionAlias, apiToken).then().assertThat().statusCode(OK.getStatusCode()); + UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken).then().assertThat().statusCode(OK.getStatusCode()); + + // Upload another file: + String pathToFile2 = "src/main/webapp/resources/images/cc0.png"; + Response uploadResponse2 = UtilIT.uploadFileViaNative(datasetId.toString(), pathToFile2, apiToken); + uploadResponse2.then().assertThat().statusCode(OK.getStatusCode()); + + // We should now have a published version, and a draft. + + // Call /versions api, *with the owner api token*, make sure both + // versions are listed; also check that the correct numbers of files + // are shown in each version (2 in the draft, 1 in the published). 
+ Response versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(2)) + .body("data[0].files.size()", equalTo(2)) + .body("data[1].files.size()", equalTo(1)); + + // Now call this api with the new (as of 6.1) pagination parameters + Integer offset = 0; + Integer howmany = 1; + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiToken, offset, howmany); + // (the above should return only one version, the draft) + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].files.size()", equalTo(2)); + + // And now call it with an un-privileged token, to make sure only one + // (the published) version is shown: + + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)); + + // And now call the "short", no-files version of the same api + versionsResponse = UtilIT.getDatasetVersions(datasetPid, apiTokenNoPerms, skipFiles); + versionsResponse.prettyPrint(); + versionsResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].files", equalTo(null)); + } + + /** * This test requires the root dataverse to be published to pass. */ @@ -1157,7 +1273,7 @@ public void testPrivateUrl() { assertEquals(OK.getStatusCode(), createPrivateUrlForPostVersionOneDraft.getStatusCode()); // A Contributor has DeleteDatasetDraft - Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, ":draft", contributorApiToken); + Response deleteDraftVersionAsContributor = UtilIT.deleteDatasetVersionViaNativeApi(datasetId, DS_VERSION_DRAFT, contributorApiToken); deleteDraftVersionAsContributor.prettyPrint(); deleteDraftVersionAsContributor.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3255,7 +3371,7 @@ public void getDatasetVersionCitation() { createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); - Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, ":draft", apiToken); + Response getDatasetVersionCitationResponse = UtilIT.getDatasetVersionCitation(datasetId, DS_VERSION_DRAFT, apiToken); getDatasetVersionCitationResponse.prettyPrint(); getDatasetVersionCitationResponse.then().assertThat() @@ -3265,7 +3381,7 @@ public void getDatasetVersionCitation() { } @Test - public void getVersionFiles() throws IOException { + public void getVersionFiles() throws IOException, InterruptedException { Response createUser = UtilIT.createRandomUser(); createUser.then().assertThat().statusCode(OK.getStatusCode()); String apiToken = UtilIT.getApiTokenFromResponse(createUser); @@ -3297,39 +3413,42 @@ public void getVersionFiles() throws IOException { int testPageSize = 2; // Test page 1 - Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, null, null, apiToken); - - int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(testPageSize, fileMetadatasCount); + Response getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, null, null, null, null, null, null, null, false, 
apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName1)) .body("data[1].label", equalTo(testFileName2)); - // Test page 2 - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize, null, apiToken); - - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + int fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); assertEquals(testPageSize, fileMetadatasCount); + String testFileId1 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[0].dataFile.id"); + String testFileId2 = JsonPath.from(getVersionFilesResponsePaginated.body().asString()).getString("data[1].dataFile.id"); + + // Test page 2 + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize, null, null, null, null, null, null, false, apiToken); + getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName3)) .body("data[1].label", equalTo(testFileName4)); - // Test page 3 (last) - getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, testDatasetVersion, testPageSize, testPageSize * 2, null, apiToken); - fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); - assertEquals(1, fileMetadatasCount); + assertEquals(testPageSize, fileMetadatasCount); + + // Test page 3 (last) + getVersionFilesResponsePaginated = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, testPageSize, testPageSize * 2, null, null, null, null, null, null, false, apiToken); getVersionFilesResponsePaginated.then().assertThat() .statusCode(OK.getStatusCode()) .body("data[0].label", equalTo(testFileName5)); + fileMetadatasCount = getVersionFilesResponsePaginated.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + // Test NameZA order criteria - Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.NameZA.toString(), apiToken); + Response getVersionFilesResponseNameZACriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.NameZA.toString(), false, apiToken); getVersionFilesResponseNameZACriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3340,7 +3459,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Newest order criteria - Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Newest.toString(), apiToken); + Response getVersionFilesResponseNewestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Newest.toString(), false, apiToken); getVersionFilesResponseNewestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3351,7 +3470,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName1)); // Test Oldest order criteria - Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, 
DatasetVersionServiceBean.FileMetadatasOrderCriteria.Oldest.toString(), apiToken); + Response getVersionFilesResponseOldestCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Oldest.toString(), false, apiToken); getVersionFilesResponseOldestCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3362,7 +3481,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Size order criteria - Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Size.toString(), apiToken); + Response getVersionFilesResponseSizeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Size.toString(), false, apiToken); getVersionFilesResponseSizeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3373,7 +3492,7 @@ public void getVersionFiles() throws IOException { .body("data[4].label", equalTo(testFileName4)); // Test Type order criteria - Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, DatasetVersionServiceBean.FileMetadatasOrderCriteria.Type.toString(), apiToken); + Response getVersionFilesResponseTypeCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, DatasetVersionFilesServiceBean.FileOrderCriteria.Type.toString(), false, apiToken); getVersionFilesResponseTypeCriteria.then().assertThat() .statusCode(OK.getStatusCode()) @@ -3385,9 +3504,602 @@ public void getVersionFiles() throws IOException { // Test invalid order criteria String invalidOrderCriteria = "invalidOrderCriteria"; - Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, testDatasetVersion, null, null, invalidOrderCriteria, apiToken); + Response getVersionFilesResponseInvalidOrderCriteria = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, null, invalidOrderCriteria, false, apiToken); getVersionFilesResponseInvalidOrderCriteria.then().assertThat() .statusCode(BAD_REQUEST.getStatusCode()) .body("message", equalTo("Invalid order criteria: " + invalidOrderCriteria)); + + // Test Content Type + Response getVersionFilesResponseContentType = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, "image/png", null, null, null, null, null, false, apiToken); + + getVersionFilesResponseContentType.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponseContentType.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Category Name + String testCategory = "testCategory"; + Response setFileCategoriesResponse = UtilIT.setFileCategories(testFileId1, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + setFileCategoriesResponse = UtilIT.setFileCategories(testFileId2, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseCategoryName = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, testCategory, null, null, null, false, apiToken); + + 
getVersionFilesResponseCategoryName.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseCategoryName.jsonPath().getList("data").size(); + assertEquals(2, fileMetadatasCount); + + // Test Access Status Restricted + Response restrictFileResponse = UtilIT.restrictFile(String.valueOf(testFileId1), true, apiToken); + restrictFileResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.Restricted.toString(), null, null, null, null, false, apiToken); + + getVersionFilesResponseRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseRestricted.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Access Status Embargoed + UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); + String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + + // Create embargo for test file 1 (Embargoed and Restricted) + Response createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId1), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + // Create embargo for test file 2 (Embargoed and Public) + createActiveFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(testFileId2), activeEmbargoDate, apiToken); + + createActiveFileEmbargoResponse.then().assertThat() + .statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseEmbargoedThenPublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString(), null, null, null, null, false, apiToken); + + getVersionFilesResponseEmbargoedThenPublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName2)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenPublic.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + Response getVersionFilesResponseEmbargoedThenRestricted = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.EmbargoedThenRestricted.toString(), null, null, null, null, false, apiToken); + + getVersionFilesResponseEmbargoedThenRestricted.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseEmbargoedThenRestricted.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Access Status Public + Response getVersionFilesResponsePublic = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, null, false, apiToken); + + getVersionFilesResponsePublic.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName3)) + .body("data[1].label", equalTo(testFileName4)) + .body("data[2].label", equalTo(testFileName5)); + + fileMetadatasCount = getVersionFilesResponsePublic.jsonPath().getList("data").size(); + assertEquals(3, fileMetadatasCount); + + // Test invalid 
access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, invalidStatus, null, null, null, null, false, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + + // Test Search Text + Response getVersionFilesResponseSearchText = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, null, "test_1", null, false, apiToken); + + getVersionFilesResponseSearchText.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)); + + fileMetadatasCount = getVersionFilesResponseSearchText.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFilesResponseNoDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, false, apiToken); + getVersionFilesResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFilesResponseDeaccessioned = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, apiToken); + + getVersionFilesResponseDeaccessioned.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data[0].label", equalTo(testFileName1)) + .body("data[1].label", equalTo(testFileName2)) + .body("data[2].label", equalTo(testFileName3)) + .body("data[3].label", equalTo(testFileName4)) + .body("data[4].label", equalTo(testFileName5)); + + // Test Tabular Tag Name + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response getVersionFilesResponseTabularTagName = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST, null, null, null, null, null, tabularTagName, null, null, false, apiToken); + + getVersionFilesResponseTabularTagName.then().assertThat() + .statusCode(OK.getStatusCode()) +
.body("data[0].label", equalTo("test.tab")); + + fileMetadatasCount = getVersionFilesResponseTabularTagName.jsonPath().getList("data").size(); + assertEquals(1, fileMetadatasCount); + + // Test that the dataset files for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFiles(datasetId, "1.0", null, null, null, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } + + @Test + public void getVersionFileCounts() throws IOException, InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test files + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + String testFileName3 = "test_3.png"; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[50], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[200], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName3, new byte[100], apiToken); + + // Creating a categorized test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + String testCategory = "testCategory"; + Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, List.of(testCategory)); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Setting embargo for file (Embargo and Public) + UtilIT.setSetting(SettingsServiceBean.Key.MaxEmbargoDurationInMonths, "12"); + String activeEmbargoDate = LocalDate.now().plusMonths(6).format(DateTimeFormatter.ofPattern("yyyy-MM-dd")); + Response createFileEmbargoResponse = UtilIT.createFileEmbargo(datasetId, Integer.parseInt(dataFileId), activeEmbargoDate, apiToken); + createFileEmbargoResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Getting the file counts and assert each count + Response getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, null, null, false, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + 
JsonPath responseJsonPath = getVersionFileCountsResponse.jsonPath(); + LinkedHashMap responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + LinkedHashMap responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + LinkedHashMap responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + LinkedHashMap responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(4, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test content type criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, "image/png", null, null, null, null, false, apiToken); + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(2, (Integer) responseJsonPath.get("data.total")); + assertEquals(2, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); + assertEquals(2, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test access status criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, null, false, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, 
responseCountPerTabularTagNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test invalid access status + String invalidStatus = "invalidStatus"; + Response getVersionFilesResponseInvalidStatus = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, invalidStatus, null, null, null, false, apiToken); + getVersionFilesResponseInvalidStatus.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(BundleUtil.getStringFromBundle("datasets.api.version.files.invalid.access.status", List.of(invalidStatus)))); + + // Test category name criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, testCategory, null, null, false, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerCategoryNameMap.get(testCategory)); + assertEquals(0, responseCountPerTabularTagNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.EmbargoedThenPublic.toString())); + + // Test search text criteria + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, null, "test", false, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(3, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("image/png")); + assertEquals(2, responseCountPerContentTypeMap.get("text/plain")); + assertEquals(2, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(0, responseCountPerTabularTagNameMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(3, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test tabular tag name criteria + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = 
uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + String tabularTagName = "Survey"; + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(tabularTagName)); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + getVersionFileCountsResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST, null, null, null, tabularTagName, null, false, apiToken); + + getVersionFileCountsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponse.jsonPath(); + responseCountPerContentTypeMap = responseJsonPath.get("data.perContentType"); + responseCountPerCategoryNameMap = responseJsonPath.get("data.perCategoryName"); + responseCountPerTabularTagNameMap = responseJsonPath.get("data.perTabularTagName"); + responseCountPerAccessStatusMap = responseJsonPath.get("data.perAccessStatus"); + + assertEquals(1, (Integer) responseJsonPath.get("data.total")); + assertEquals(1, responseCountPerContentTypeMap.get("text/tab-separated-values")); + assertEquals(1, responseCountPerContentTypeMap.size()); + assertEquals(0, responseCountPerCategoryNameMap.size()); + assertEquals(1, responseCountPerTabularTagNameMap.size()); + assertEquals(1, responseCountPerTabularTagNameMap.get(tabularTagName)); + assertEquals(1, responseCountPerAccessStatusMap.size()); + assertEquals(1, responseCountPerAccessStatusMap.get(FileSearchCriteria.FileAccessStatus.Public.toString())); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getVersionFileCountsResponseNoDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, false, apiToken); + getVersionFileCountsResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getVersionFileCountsResponseDeaccessioned = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, true, apiToken); + getVersionFileCountsResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + responseJsonPath = getVersionFileCountsResponseDeaccessioned.jsonPath(); + assertEquals(5, (Integer) responseJsonPath.get("data.total")); + + // Test that the dataset file counts for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDatasetVersionResponse = UtilIT.getVersionFileCounts(datasetId, "1.0", null, null, null, null, null, true, null); + getDatasetVersionResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); 
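+ // Guests (null apiToken) receive NOT_FOUND rather than FORBIDDEN, presumably so the response does not reveal that a deaccessioned version exists.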
+ } + + @Test + public void deaccessionDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + String testDeaccessionReason = "Test deaccession reason."; + String testDeaccessionForwardURL = "http://demo.dataverse.org"; + + // Test that draft and latest version constants are not allowed and a bad request error is received + String expectedInvalidVersionIdentifierError = BundleUtil.getStringFromBundle("datasets.api.deaccessionDataset.invalid.version.identifier.error", List.of(DS_VERSION_LATEST_PUBLISHED)); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_DRAFT, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(expectedInvalidVersionIdentifierError)); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(expectedInvalidVersionIdentifierError)); + + // Test that a not found error occurs when there is no published version available + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Publish test dataset + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that a bad request error is received when the forward URL exceeds ARCHIVE_NOTE_MAX_LENGTH + String testInvalidDeaccessionForwardURL = RandomStringUtils.randomAlphabetic(ARCHIVE_NOTE_MAX_LENGTH + 1); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testInvalidDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", containsString(testInvalidDeaccessionForwardURL)); + + // Test that the dataset is successfully deaccessioned when published and valid deaccession params are sent + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that a not found error occurs when the only published version has already been deaccessioned + deaccessionDatasetResponse = 
UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, testDeaccessionForwardURL, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Test that a dataset can be deaccessioned without forward URL + createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, testDeaccessionReason, null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + } + + @Test + public void getDownloadSize() throws IOException, InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String datasetPersistentId = JsonPath.from(createDatasetResponse.body().asString()).getString("data.persistentId"); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Creating test text files + String testFileName1 = "test_1.txt"; + String testFileName2 = "test_2.txt"; + + int testFileSize1 = 50; + int testFileSize2 = 200; + + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName1, new byte[testFileSize1], apiToken); + UtilIT.createAndUploadTestFile(datasetPersistentId, testFileName2, new byte[testFileSize2], apiToken); + + int expectedTextFilesStorageSize = testFileSize1 + testFileSize2; + + // Get the total size when there are no tabular files + Response getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedTextFilesStorageSize)); + + // Upload test tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + int tabularOriginalSize = 157; + + // Ensure tabular file is ingested + Thread.sleep(2000); + + // Get the total size ignoring the original tabular file sizes + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Archival.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()); + + int actualSizeIgnoringOriginalTabularSizes = 
Integer.parseInt(getDownloadSizeResponse.getBody().jsonPath().getString("data.storageSize")); + + // Assert that the size has been incremented with the last uploaded file + assertTrue(actualSizeIgnoringOriginalTabularSizes > expectedTextFilesStorageSize); + + // Get the total size including only original sizes and ignoring archival sizes for tabular files + int expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedTextFilesStorageSize; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + int tabularArchivalSize = actualSizeIgnoringOriginalTabularSizes - expectedTextFilesStorageSize; + int expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedTextFilesStorageSize; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size sending invalid file download size mode + String invalidMode = "invalidMode"; + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, invalidMode, false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo("Invalid mode: " + invalidMode)); + + // Upload second test tabular file (same source as before) + uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Ensure tabular file is ingested + Thread.sleep(2000); + + // Get the total size including only original sizes and ignoring archival sizes for tabular files + expectedSizeIncludingOnlyOriginalForTabular = tabularOriginalSize + expectedSizeIncludingOnlyOriginalForTabular; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.Original.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingOnlyOriginalForTabular)); + + // Get the total size including both the original and archival tabular file sizes + expectedSizeIncludingAllSizes = tabularArchivalSize + tabularOriginalSize + expectedSizeIncludingAllSizes; + + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizes)); + + // Get the total size including both the original and archival tabular file sizes with search criteria + getDownloadSizeResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST, "text/plain", 
FileSearchCriteria.FileAccessStatus.Public.toString(), null, null, "test_", DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + // We exclude tabular sizes from the expected result since the search criteria filters by content type "text/plain" and search text "test_" + int expectedSizeIncludingAllSizesAndApplyingCriteria = testFileSize1 + testFileSize2; + getDownloadSizeResponse.then().assertThat().statusCode(OK.getStatusCode()) + .body("data.storageSize", equalTo(expectedSizeIncludingAllSizesAndApplyingCriteria)); + + // Test Deaccessioned + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response deaccessionDatasetResponse = UtilIT.deaccessionDataset(datasetId, DS_VERSION_LATEST_PUBLISHED, "Test deaccession reason.", null, apiToken); + deaccessionDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // includeDeaccessioned false + Response getDownloadSizeResponseNoDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), false, apiToken); + getDownloadSizeResponseNoDeaccessioned.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // includeDeaccessioned true + Response getDownloadSizeResponseDeaccessioned = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, apiToken); + getDownloadSizeResponseDeaccessioned.then().assertThat().statusCode(OK.getStatusCode()); + + // Test that the download size for a deaccessioned dataset cannot be accessed by a guest + // By latest published version + Response getDownloadSizeGuestUserResponse = UtilIT.getDownloadSize(datasetId, DS_VERSION_LATEST_PUBLISHED, null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getDownloadSizeGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + // By specific version 1.0 + getDownloadSizeGuestUserResponse = UtilIT.getDownloadSize(datasetId, "1.0", null, null, null, null, null, DatasetVersionFilesServiceBean.FileDownloadSizeMode.All.toString(), true, null); + getDownloadSizeGuestUserResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } + + @Test + public void testGetUserPermissionsOnDataset() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Call with valid dataset id + Response getUserPermissionsOnDatasetResponse =
UtilIT.getUserPermissionsOnDataset(Integer.toString(datasetId), apiToken); + getUserPermissionsOnDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean canViewUnpublishedDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canViewUnpublishedDataset"); + assertTrue(canViewUnpublishedDataset); + boolean canEditDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canEditDataset"); + assertTrue(canEditDataset); + boolean canPublishDataset = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canPublishDataset"); + assertTrue(canPublishDataset); + boolean canManageDatasetPermissions = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canManageDatasetPermissions"); + assertTrue(canManageDatasetPermissions); + boolean canDeleteDatasetDraft = JsonPath.from(getUserPermissionsOnDatasetResponse.body().asString()).getBoolean("data.canDeleteDatasetDraft"); + assertTrue(canDeleteDatasetDraft); + + // Call with invalid dataset id + Response getUserPermissionsOnDatasetInvalidIdResponse = UtilIT.getUserPermissionsOnDataset("testInvalidId", apiToken); + getUserPermissionsOnDatasetInvalidIdResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 09052f9e4ea..78ece6ecc42 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -17,11 +17,13 @@ import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; -import static jakarta.ws.rs.core.Response.Status.CREATED; -import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import jakarta.ws.rs.core.Response.Status; -import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; import static jakarta.ws.rs.core.Response.Status.OK; +import static jakarta.ws.rs.core.Response.Status.CREATED; +import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST; +import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; +import static jakarta.ws.rs.core.Response.Status.NOT_FOUND; +import static jakarta.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; @@ -144,6 +146,44 @@ public void testMinimalDataverse() throws FileNotFoundException { deleteDataverse.then().assertThat().statusCode(OK.getStatusCode()); } + /** + * A regular user can create a Dataverse Collection and access its + * GuestbookResponses by DV alias or ID. + * A request for a non-existent Dataverse's GuestbookResponses returns + * Not Found. + * A regular user cannot access the guestbook responses for a Dataverse + * that they do not have permissions for, like the root Dataverse. 
+ */ + @Test + public void testGetGuestbookResponses() { + Response createUser = UtilIT.createRandomUser(); + createUser.prettyPrint(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response create = UtilIT.createRandomDataverse(apiToken); + create.prettyPrint(); + create.then().assertThat().statusCode(CREATED.getStatusCode()); + String alias = UtilIT.getAliasFromResponse(create); + Integer dvId = UtilIT.getDataverseIdFromResponse(create); + + logger.info("Request guestbook responses for non-existent Dataverse"); + Response getResponsesByBadAlias = UtilIT.getGuestbookResponses("-1", null, apiToken); + getResponsesByBadAlias.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + logger.info("Request guestbook responses for existent Dataverse by alias"); + Response getResponsesByAlias = UtilIT.getGuestbookResponses(alias, null, apiToken); + getResponsesByAlias.then().assertThat().statusCode(OK.getStatusCode()); + + logger.info("Request guestbook responses for existent Dataverse by ID"); + Response getResponsesById = UtilIT.getGuestbookResponses(dvId.toString(), null, apiToken); + getResponsesById.then().assertThat().statusCode(OK.getStatusCode()); + + logger.info("Request guestbook responses for root Dataverse by alias"); + getResponsesById = UtilIT.getGuestbookResponses("root", null, apiToken); + getResponsesById.prettyPrint(); + getResponsesById.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); + } + @Test public void testNotEnoughJson() { Response createUser = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java index 598ba36c1e1..927efb0b142 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DownloadFilesIT.java @@ -16,6 +16,9 @@ import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import java.util.zip.ZipOutputStream; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_LATEST_PUBLISHED; import static jakarta.ws.rs.core.Response.Status.CREATED; import static jakarta.ws.rs.core.Response.Status.FORBIDDEN; import static jakarta.ws.rs.core.Response.Status.OK; @@ -188,8 +191,7 @@ public void downloadAllFilesByVersion() throws IOException { HashSet expectedFiles6 = new HashSet<>(Arrays.asList("CODE_OF_CONDUCT.md", "LICENSE.md", "MANIFEST.TXT", "README.md", "CONTRIBUTING.md")); assertEquals(expectedFiles6, filenamesFound6); - String datasetVersionLatestPublished = ":latest-published"; - Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, datasetVersionLatestPublished, apiToken); + Response downloadFiles9 = UtilIT.downloadFiles(datasetPid, DS_VERSION_LATEST_PUBLISHED, apiToken); downloadFiles9.then().assertThat() .statusCode(OK.getStatusCode()); @@ -200,8 +202,7 @@ public void downloadAllFilesByVersion() throws IOException { assertEquals(expectedFiles7, filenamesFound7); // Guests cannot download draft versions. 
- String datasetVersionDraft = ":draft"; - Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, datasetVersionDraft, null); + Response downloadFiles10 = UtilIT.downloadFiles(datasetPid, DS_VERSION_DRAFT, null); downloadFiles10.prettyPrint(); downloadFiles10.then().assertThat() .statusCode(UNAUTHORIZED.getStatusCode()) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java index 0a16bca7008..16726485dee 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/FilesIT.java @@ -2,12 +2,16 @@ import io.restassured.RestAssured; import io.restassured.response.Response; + +import java.util.List; import java.util.logging.Logger; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.BeforeAll; import io.restassured.path.json.JsonPath; + +import static edu.harvard.iq.dataverse.api.ApiConstants.DS_VERSION_DRAFT; import static io.restassured.path.json.JsonPath.with; import io.restassured.path.xml.XmlPath; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -30,16 +34,12 @@ import static jakarta.ws.rs.core.Response.Status.*; import org.hamcrest.CoreMatchers; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.startsWith; -import static org.hamcrest.CoreMatchers.nullValue; import org.hamcrest.Matchers; import org.junit.jupiter.api.AfterAll; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; -import static org.junit.jupiter.api.Assertions.assertNull; -import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.hamcrest.CoreMatchers.*; +import static org.hamcrest.CoreMatchers.hasItem; +import static org.junit.jupiter.api.Assertions.*; public class FilesIT { @@ -1356,7 +1356,7 @@ public void testDataSizeInDataverse() throws InterruptedException { .statusCode(OK.getStatusCode()); String apiTokenRando = createUserGetToken(); - Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), ":draft", apiTokenRando); + Response datasetStorageSizeResponseDraft = UtilIT.findDatasetDownloadSize(datasetId.toString(), DS_VERSION_DRAFT, apiTokenRando); datasetStorageSizeResponseDraft.prettyPrint(); assertEquals(UNAUTHORIZED.getStatusCode(), datasetStorageSizeResponseDraft.getStatusCode()); Response publishDatasetResp = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); @@ -1609,7 +1609,7 @@ public void test_CrawlableAccessToDatasetFiles() { // Expected values in the output: String expectedTitleTopFolder = "Index of folder /"; String expectedLinkTopFolder = folderName + "/"; - String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=:draft&folder=subfolder"; + String expectedLinkAhrefTopFolder = "/api/datasets/"+datasetId+"/dirindex/?version=" + DS_VERSION_DRAFT + "&folder=subfolder"; String expectedTitleSubFolder = "Index of folder /" + folderName; String expectedLinkAhrefSubFolder = "/api/access/datafile/" + folderName + "/" + dataFileId; @@ -1989,7 +1989,7 @@ public void testDeleteFile() { deleteResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 2 deleted from post v1.0 draft - Response postv1draft = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); 
postv1draft.prettyPrint(); postv1draft.then().assertThat() .body("data.files.size()", equalTo(1)) @@ -2011,7 +2011,7 @@ public void testDeleteFile() { downloadResponse2.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 still in post v1.0 draft - Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft2 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft2.prettyPrint(); postv1draft2.then().assertThat() .body("data.files[0].dataFile.filename", equalTo("orcid_16x16.png")) @@ -2026,7 +2026,7 @@ public void testDeleteFile() { deleteResponse3.then().assertThat().statusCode(OK.getStatusCode()); // Check file 3 deleted from post v1.0 draft - Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, ":draft", apiToken); + Response postv1draft3 = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken); postv1draft3.prettyPrint(); postv1draft3.then().assertThat() .body("data.files[0]", equalTo(null)) @@ -2211,4 +2211,155 @@ public void testGetFileDataTables() throws InterruptedException { getFileDataTablesForTabularFileResponse = UtilIT.getFileDataTables(testTabularFileId, randomUserApiToken); getFileDataTablesForTabularFileResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode()); } + + @Test + public void testSetFileCategories() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Set categories + String testCategory1 = "testCategory1"; + String testCategory2 = "testCategory2"; + List testCategories = List.of(testCategory1, testCategory2); + Response setFileCategoriesResponse = UtilIT.setFileCategories(dataFileId, apiToken, testCategories); + setFileCategoriesResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new categories + Response getFileDataResponse = UtilIT.getFileData(dataFileId, apiToken); + getFileDataResponse.prettyPrint(); + getFileDataResponse.then().assertThat() + .body("data.categories", hasItem(testCategory1)) + .body("data.categories", hasItem(testCategory2)) + .statusCode(OK.getStatusCode()); + } + + @Test + public void testSetFileTabularTags() throws InterruptedException { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + 
createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload tabular file + String pathToTabularTestFile = "src/test/resources/tab/test.tab"; + Response uploadTabularFileResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTabularTestFile, Json.createObjectBuilder().build(), apiToken); + uploadTabularFileResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String tabularFileId = uploadTabularFileResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Ensure tabular file is ingested + sleep(2000); + + // Set tabular tags + String testTabularTag1 = "Survey"; + String testTabularTag2 = "Genomics"; + // We repeat one to test that it is not duplicated + String testTabularTag3 = "Genomics"; + List testTabularTags = List.of(testTabularTag1, testTabularTag2, testTabularTag3); + Response setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, testTabularTags); + setFileTabularTagsResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Get file data and check for new tabular tags + Response getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + + int actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size(); + assertEquals(2, actualTabularTagsCount); + + // Set invalid tabular tag + String testInvalidTabularTag = "Invalid"; + setFileTabularTagsResponse = UtilIT.setFileTabularTags(tabularFileId, apiToken, List.of(testInvalidTabularTag)); + setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + + // Get file data and check tabular tags are unaltered + getFileDataResponse = UtilIT.getFileData(tabularFileId, apiToken); + getFileDataResponse.then().assertThat() + .body("data.dataFile.tabularTags", hasItem(testTabularTag1)) + .body("data.dataFile.tabularTags", hasItem(testTabularTag2)) + .statusCode(OK.getStatusCode()); + + actualTabularTagsCount = getFileDataResponse.jsonPath().getList("data.dataFile.tabularTags").size(); + assertEquals(2, actualTabularTagsCount); + + // Should receive an error when calling the endpoint for a non-tabular file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String nonTabularFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + setFileTabularTagsResponse = UtilIT.setFileTabularTags(nonTabularFileId, apiToken, List.of(testInvalidTabularTag)); + setFileTabularTagsResponse.then().assertThat().statusCode(BAD_REQUEST.getStatusCode()); + } + + @Test + public void testGetHasBeenDeleted() { + Response createUser = UtilIT.createRandomUser(); + createUser.then().assertThat().statusCode(OK.getStatusCode()); + String apiToken = 
UtilIT.getApiTokenFromResponse(createUser); + + Response createDataverseResponse = UtilIT.createRandomDataverse(apiToken); + createDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, apiToken); + createDatasetResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + int datasetId = JsonPath.from(createDatasetResponse.body().asString()).getInt("data.id"); + + // Upload test file + String pathToTestFile = "src/test/resources/images/coffeeshop.png"; + Response uploadResponse = UtilIT.uploadFileViaNative(Integer.toString(datasetId), pathToTestFile, Json.createObjectBuilder().build(), apiToken); + uploadResponse.then().assertThat().statusCode(OK.getStatusCode()); + + String dataFileId = uploadResponse.getBody().jsonPath().getString("data.files[0].dataFile.id"); + + // Publish dataverse and dataset + Response publishDataverseResponse = UtilIT.publishDataverseViaNativeApi(dataverseAlias, apiToken); + publishDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + Response publishDatasetResponse = UtilIT.publishDatasetViaNativeApi(datasetId, "major", apiToken); + publishDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has not been deleted + Response getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + boolean fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertFalse(fileHasBeenDeleted); + + // Delete test file + Response deleteFileInDatasetResponse = UtilIT.deleteFileInDataset(Integer.parseInt(dataFileId), apiToken); + deleteFileInDatasetResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the file has been deleted + getHasBeenDeletedResponse = UtilIT.getHasBeenDeleted(dataFileId, apiToken); + getHasBeenDeletedResponse.then().assertThat().statusCode(OK.getStatusCode()); + fileHasBeenDeleted = JsonPath.from(getHasBeenDeletedResponse.body().asString()).getBoolean("data"); + assertTrue(fileHasBeenDeleted); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java index 17eba4770f1..75f514f3398 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/SignpostingIT.java @@ -80,6 +80,7 @@ public void testSignposting() { assertTrue(linkHeader.contains(datasetPid)); assertTrue(linkHeader.contains("cite-as")); assertTrue(linkHeader.contains("describedby")); + assertTrue(linkHeader.contains(";rel=\"license\"")); Pattern pattern = Pattern.compile("<([^<]*)> ; rel=\"linkset\";type=\"application\\/linkset\\+json\""); Matcher matcher = pattern.matcher(linkHeader); @@ -92,7 +93,7 @@ public void testSignposting() { String responseString = linksetResponse.getBody().asString(); - JsonObject data = JsonUtil.getJsonObject(responseString).getJsonObject("data"); + JsonObject data = JsonUtil.getJsonObject(responseString); JsonObject lso = data.getJsonArray("linkset").getJsonObject(0); System.out.println("Linkset: " + lso.toString()); @@ -101,6 +102,16 @@ public void testSignposting() { assertTrue(lso.getString("anchor").indexOf("/dataset.xhtml?persistentId=" + datasetPid) > 0); assertTrue(lso.containsKey("describedby")); 
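+ // Note that the linkset endpoint returns the linkset document at the top level of the response, without the "data" envelope used by most native API endpoints.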
+ // Test export URL from link header + // regex inspired by https://stackoverflow.com/questions/68860255/how-to-match-the-closest-opening-and-closing-brackets + Pattern exporterPattern = Pattern.compile("[<\\[][^()\\[\\]]*?exporter=schema.org[^()\\[\\]]*[>\\]]"); + Matcher exporterMatcher = exporterPattern.matcher(linkHeader); + assertTrue(exporterMatcher.find()); + + Response exportDataset = UtilIT.exportDataset(datasetPid, "schema.org"); + exportDataset.prettyPrint(); + exportDataset.then().assertThat().statusCode(OK.getStatusCode()); + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index f61b392c898..e3a7fd0cfc3 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -11,6 +11,7 @@ import jakarta.json.JsonObjectBuilder; import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static jakarta.ws.rs.core.Response.Status.CREATED; import java.nio.charset.StandardCharsets; import java.nio.file.Files; @@ -38,6 +39,7 @@ import org.hamcrest.Description; import org.hamcrest.Matcher; +import static edu.harvard.iq.dataverse.api.ApiConstants.*; import static io.restassured.path.xml.XmlPath.from; import static io.restassured.RestAssured.given; import edu.harvard.iq.dataverse.DatasetField; @@ -116,6 +118,16 @@ public static Response createRandomUser() { return createRandomUser("user"); } + /** + * A convenience method for creating a random test user, when all you need + * is the API token. + * @return apiToken + */ + public static String createRandomUserGetToken() { + Response createUser = createRandomUser(); + return getApiTokenFromResponse(createUser); + } + public static Response createUser(String username, String email) { logger.info("Creating user " + username); String userAsJson = getUserAsJsonString(username, username, username, email); @@ -286,7 +298,7 @@ static String getAliasFromResponse(Response createDataverseResponse) { static Integer getDataverseIdFromResponse(Response createDataverseResponse) { JsonPath createdDataverse = JsonPath.from(createDataverseResponse.body().asString()); int dataverseId = createdDataverse.getInt("data.id"); - logger.info("Id found in create dataverse response: " + createdDataverse); + logger.info("Id found in create dataverse response: " + dataverseId); return dataverseId; } @@ -366,12 +378,35 @@ static Response createRandomDataverse(String apiToken) { return createDataverse(alias, category, apiToken); } + /** + * A convenience method for creating a random collection and getting its + * alias in one step.
+ * @param apiToken + * @return alias + */ + static String createRandomCollectionGetAlias(String apiToken){ + + Response createCollectionResponse = createRandomDataverse(apiToken); + //createDataverseResponse.prettyPrint(); + createCollectionResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + return UtilIT.getAliasFromResponse(createCollectionResponse); + } + static Response showDataverseContents(String alias, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .when().get("/api/dataverses/" + alias + "/contents"); } - + + static Response getGuestbookResponses(String dataverseAlias, Long guestbookId, String apiToken) { + RequestSpecification requestSpec = given() + .header(API_TOKEN_HTTP_HEADER, apiToken); + if (guestbookId != null) { + requestSpec.queryParam("guestbookId", guestbookId); + } + return requestSpec.get("/api/dataverses/" + dataverseAlias + "/guestbookResponses/"); + } + static Response createRandomDatasetViaNativeApi(String dataverseAlias, String apiToken) { return createRandomDatasetViaNativeApi(dataverseAlias, apiToken, false); } @@ -515,7 +550,7 @@ static Response updateDatasetMetadataViaNative(String persistentId, String pathT .header(API_TOKEN_HTTP_HEADER, apiToken) .body(jsonIn) .contentType("application/json") - .put("/api/datasets/:persistentId/versions/:draft?persistentId=" + persistentId); + .put("/api/datasets/:persistentId/versions/" + DS_VERSION_DRAFT + "?persistentId=" + persistentId); return response; } @@ -791,7 +826,7 @@ static Response deleteAuxFile(Long fileId, String formatTag, String formatVersio static Response getCrawlableFileAccess(String datasetId, String folderName, String apiToken) { RequestSpecification requestSpecification = given() .header(API_TOKEN_HTTP_HEADER, apiToken); - String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=:draft"; + String apiPath = "/api/datasets/" + datasetId + "/dirindex?version=" + DS_VERSION_DRAFT; if (StringUtil.nonEmpty(folderName)) { apiPath = apiPath.concat("&folder="+folderName); } @@ -1399,15 +1434,24 @@ static Response nativeGetUsingPersistentId(String persistentId, String apiToken) } static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken) { + return getDatasetVersion(persistentId, versionNumber, apiToken, false, false); + } + + static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean skipFiles, boolean includeDeaccessioned) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/" + versionNumber + "?persistentId=" + persistentId); + .queryParam("includeDeaccessioned", includeDeaccessioned) + .get("/api/datasets/:persistentId/versions/" + + versionNumber + + "?persistentId=" + + persistentId + + (skipFiles ? 
"&includeFiles=false" : "")); } static Response getMetadataBlockFromDatasetVersion(String persistentId, String versionNumber, String metadataBlock, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) - .get("/api/datasets/:persistentId/versions/:latest-published/metadata/citation?persistentId=" + persistentId); + .get("/api/datasets/:persistentId/versions/" + DS_VERSION_LATEST_PUBLISHED + "/metadata/citation?persistentId=" + persistentId); } static Response makeSuperUser(String username) { @@ -1763,6 +1807,18 @@ static Response removeDatasetThumbnail(String datasetPersistentId, String apiTok } static Response getDatasetVersions(String idOrPersistentId, String apiToken) { + return getDatasetVersions(idOrPersistentId, apiToken, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, boolean skipFiles) { + return getDatasetVersions(idOrPersistentId, apiToken, null, null, skipFiles); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit) { + return getDatasetVersions(idOrPersistentId, apiToken, offset, limit, false); + } + + static Response getDatasetVersions(String idOrPersistentId, String apiToken, Integer offset, Integer limit, boolean skipFiles) { logger.info("Getting Dataset Versions"); String idInPath = idOrPersistentId; // Assume it's a number. String optionalQueryParam = ""; // If idOrPersistentId is a number we'll just put it in the path. @@ -1770,6 +1826,27 @@ static Response getDatasetVersions(String idOrPersistentId, String apiToken) { idInPath = ":persistentId"; optionalQueryParam = "?persistentId=" + idOrPersistentId; } + if (skipFiles) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?includeFiles=false"; + } else { + optionalQueryParam = optionalQueryParam.concat("&includeFiles=false"); + } + } + if (offset != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?offset="+offset; + } else { + optionalQueryParam = optionalQueryParam.concat("&offset="+offset); + } + } + if (limit != null) { + if ("".equals(optionalQueryParam)) { + optionalQueryParam = "?limit="+limit; + } else { + optionalQueryParam = optionalQueryParam.concat("&limit="+limit); + } + } RequestSpecification requestSpecification = given(); if (apiToken != null) { requestSpecification = given() @@ -2922,7 +2999,7 @@ static Response findDatasetStorageSize(String datasetId, String apiToken) { static Response findDatasetDownloadSize(String datasetId) { return given() - .get("/api/datasets/" + datasetId + "/versions/:latest/downloadsize"); + .get("/api/datasets/" + datasetId + "/versions/" + DS_VERSION_LATEST + "/downloadsize"); } static Response findDatasetDownloadSize(String datasetId, String version, String apiToken) { @@ -3276,16 +3353,45 @@ static Response getDatasetVersionCitation(Integer datasetId, String version, Str return response; } - static Response getVersionFiles(Integer datasetId, String version, Integer limit, Integer offset, String orderCriteria, String apiToken) { + static Response getVersionFiles(Integer datasetId, + String version, + Integer limit, + Integer offset, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + String orderCriteria, + boolean includeDeaccessioned, + String apiToken) { RequestSpecification requestSpecification = given() - .header(API_TOKEN_HTTP_HEADER, apiToken) - .contentType("application/json"); + .contentType("application/json") + 
.queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } if (limit != null) { requestSpecification = requestSpecification.queryParam("limit", limit); } if (offset != null) { requestSpecification = requestSpecification.queryParam("offset", offset); } + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } if (orderCriteria != null) { requestSpecification = requestSpecification.queryParam("orderCriteria", orderCriteria); } @@ -3317,9 +3423,155 @@ static Response getFileDataTables(String dataFileId, String apiToken) { .get("/api/files/" + dataFileId + "/dataTables"); } + static Response getUserFileAccessRequested(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + dataFileId + "/userFileAccessRequested"); + } + static Response getUserPermissionsOnFile(String dataFileId, String apiToken) { return given() .header(API_TOKEN_HTTP_HEADER, apiToken) .get("/api/access/datafile/" + dataFileId + "/userPermissions"); } + + static Response getUserPermissionsOnDataset(String datasetId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/datasets/" + datasetId + "/userPermissions"); + } + + static Response createFileEmbargo(Integer datasetId, Integer fileId, String dateAvailable, String apiToken) { + JsonObjectBuilder jsonBuilder = Json.createObjectBuilder(); + jsonBuilder.add("dateAvailable", dateAvailable); + jsonBuilder.add("reason", "This is a test embargo"); + jsonBuilder.add("fileIds", Json.createArrayBuilder().add(fileId)); + String jsonString = jsonBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .contentType("application/json") + .urlEncodingEnabled(false) + .post("/api/datasets/" + datasetId + "/files/actions/:set-embargo"); + } + + static Response getVersionFileCounts(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + boolean includeDeaccessioned, + String apiToken) { + RequestSpecification requestSpecification = given() + .queryParam("includeDeaccessioned", includeDeaccessioned); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return 
requestSpecification.get("/api/datasets/" + datasetId + "/versions/" + version + "/files/counts"); + } + + static Response setFileCategories(String dataFileId, String apiToken, List categories) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String category : categories) { + jsonArrayBuilder.add(category); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("categories", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/categories"); + } + + static Response setFileTabularTags(String dataFileId, String apiToken, List tabularTags) { + JsonArrayBuilder jsonArrayBuilder = Json.createArrayBuilder(); + for (String tabularTag : tabularTags) { + jsonArrayBuilder.add(tabularTag); + } + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("tabularTags", jsonArrayBuilder); + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/files/" + dataFileId + "/metadata/tabularTags"); + } + + static Response deleteFileInDataset(Integer fileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .delete("/api/files/" + fileId); + } + + static Response getHasBeenDeleted(String dataFileId, String apiToken) { + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/files/" + dataFileId + "/hasBeenDeleted"); + } + + static Response deaccessionDataset(Integer datasetId, String version, String deaccessionReason, String deaccessionForwardURL, String apiToken) { + JsonObjectBuilder jsonObjectBuilder = Json.createObjectBuilder(); + jsonObjectBuilder.add("deaccessionReason", deaccessionReason); + if (deaccessionForwardURL != null) { + jsonObjectBuilder.add("deaccessionForwardURL", deaccessionForwardURL); + } + String jsonString = jsonObjectBuilder.build().toString(); + return given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .body(jsonString) + .post("/api/datasets/" + datasetId + "/versions/" + version + "/deaccession"); + } + + static Response getDownloadSize(Integer datasetId, + String version, + String contentType, + String accessStatus, + String categoryName, + String tabularTagName, + String searchText, + String mode, + boolean includeDeaccessioned, + String apiToken) { + RequestSpecification requestSpecification = given() + .queryParam("includeDeaccessioned", includeDeaccessioned) + .queryParam("mode", mode); + if (apiToken != null) { + requestSpecification.header(API_TOKEN_HTTP_HEADER, apiToken); + } + if (contentType != null) { + requestSpecification = requestSpecification.queryParam("contentType", contentType); + } + if (accessStatus != null) { + requestSpecification = requestSpecification.queryParam("accessStatus", accessStatus); + } + if (categoryName != null) { + requestSpecification = requestSpecification.queryParam("categoryName", categoryName); + } + if (tabularTagName != null) { + requestSpecification = requestSpecification.queryParam("tabularTagName", tabularTagName); + } + if (searchText != null) { + requestSpecification = requestSpecification.queryParam("searchText", searchText); + } + return requestSpecification + .get("/api/datasets/" + datasetId + "/versions/" + version + "/downloadsize"); + } } diff --git 
a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java index 5968cf3eaeb..ee6823ef98a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/authorization/providers/oauth2/oidc/OIDCAuthenticationProviderFactoryIT.java @@ -54,7 +54,7 @@ @Tag(Tags.INTEGRATION_TEST) @Tag(Tags.USES_TESTCONTAINERS) -@Testcontainers +@Testcontainers(disabledWithoutDocker = true) @ExtendWith(MockitoExtension.class) // NOTE: order is important here - Testcontainers must be first, otherwise it's not ready when we call getAuthUrl() @LocalJvmSettings diff --git a/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java new file mode 100644 index 00000000000..dad86e73d19 --- /dev/null +++ b/src/test/java/edu/harvard/iq/dataverse/settings/ConfigCheckServiceTest.java @@ -0,0 +1,110 @@ +package edu.harvard.iq.dataverse.settings; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Assumptions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Nested; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.io.TempDir; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Set; + +import static java.nio.file.attribute.PosixFilePermission.GROUP_READ; +import static java.nio.file.attribute.PosixFilePermission.OWNER_READ; +import org.junit.jupiter.api.Disabled; + +@Disabled +class ConfigCheckServiceTest { + + @TempDir + static Path testDir; + + private static final String testDirProp = "test.filesDir"; + + @AfterAll + static void tearDown() { + System.clearProperty(testDirProp); + } + + @Nested + class TestDirNotAbsolute { + @Test + void nonAbsolutePathForTestDir() { + System.setProperty(testDirProp, "foobar"); + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); + } + } + + @Nested + class TestDirNotWritable { + + Path notWriteableSubfolder = testDir.resolve("readonly"); + + @BeforeEach + void setUp() throws IOException { + Files.createDirectory(notWriteableSubfolder); + Files.setPosixFilePermissions(notWriteableSubfolder, Set.of(OWNER_READ, GROUP_READ)); + System.setProperty(testDirProp, notWriteableSubfolder.toString()); + } + + @Test + void writeCheckFails() { + Assumptions.assumeTrue(Files.exists(notWriteableSubfolder)); + + ConfigCheckService sut = new ConfigCheckService(); + Assertions.assertFalse(sut.checkSystemDirectories()); + } + } + + @Nested + class TestDirNotExistent { + + Path notExistTestfolder = testDir.resolve("parent-readonly"); + Path notExistConfigSubfolder = notExistTestfolder.resolve("foobar"); + + @BeforeEach + void setUp() throws IOException { + Files.createDirectory(notExistTestfolder); + // Make test dir not writeable, so the subfolder cannot be created + Files.setPosixFilePermissions(notExistTestfolder, Set.of(OWNER_READ, GROUP_READ)); + System.setProperty(testDirProp, notExistConfigSubfolder.toString()); + } + + @Test + void mkdirFails() { + Assumptions.assumeTrue(Files.exists(notExistTestfolder)); + Assumptions.assumeFalse(Files.exists(notExistConfigSubfolder)); + + ConfigCheckService sut = new 
ConfigCheckService();
+            Assertions.assertFalse(sut.checkSystemDirectories());
+        }
+    }
+
+    @Nested
+    class TestDirCreated {
+
+        Path missingToBeCreatedTestfolder = testDir.resolve("create-me");
+        Path missingToBeCreatedSubfolder = missingToBeCreatedTestfolder.resolve("foobar");
+
+        @BeforeEach
+        void setUp() throws IOException {
+            Files.createDirectory(missingToBeCreatedTestfolder);
+            System.setProperty(testDirProp, missingToBeCreatedSubfolder.toString());
+        }
+
+        @Test
+        void mkdirSucceeds() {
+            Assumptions.assumeTrue(Files.exists(missingToBeCreatedTestfolder));
+            Assumptions.assumeFalse(Files.exists(missingToBeCreatedSubfolder));
+
+            ConfigCheckService sut = new ConfigCheckService();
+            Assertions.assertTrue(sut.checkSystemDirectories());
+        }
+    }
+
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
index 41b13417a23..41032ffa811 100644
--- a/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/sitemap/SiteMapUtilTest.java
@@ -10,7 +10,6 @@ import edu.harvard.iq.dataverse.util.xml.XmlValidator;
 import java.io.File;
 import java.io.IOException;
-import java.net.MalformedURLException;
 import java.net.URL;
 import java.nio.file.Files;
 import java.nio.file.Path;
@@ -21,17 +20,39 @@ import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
-import static org.junit.jupiter.api.Assertions.*;
+
+import static org.junit.jupiter.api.Assertions.*;
 import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 import org.xml.sax.SAXException;
 
-public class SiteMapUtilTest {
-
+class SiteMapUtilTest {
+
+    @TempDir
+    Path tempDir;
+    Path tempDocroot;
+
+    @BeforeEach
+    void setup() throws IOException {
+        // NOTE: This might be unsafe for parallel tests, but our @SystemProperty helper does not yet support
+        //       lookups from vars or methods.
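+        // test.filesDir is picked up by src/test/resources/META-INF/microprofile-config.properties,
+        // which derives dataverse.files.directory, dataverse.files.uploads and dataverse.files.docroot from it.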
+ System.setProperty("test.filesDir", tempDir.toString()); + this.tempDocroot = tempDir.resolve("docroot"); + Files.createDirectory(tempDocroot); + } + + @AfterEach + void teardown() { + System.clearProperty("test.filesDir"); + } + @Test - public void testUpdateSiteMap() throws IOException, ParseException { - + void testUpdateSiteMap() throws IOException, ParseException, SAXException { + // given List dataverses = new ArrayList<>(); String publishedDvString = "publishedDv1"; Dataverse publishedDataverse = new Dataverse(); @@ -77,39 +98,18 @@ public void testUpdateSiteMap() throws IOException, ParseException { datasetVersions.add(datasetVersion); deaccessioned.setVersions(datasetVersions); datasets.add(deaccessioned); - - Path tmpDirPath = Files.createTempDirectory(null); - String tmpDir = tmpDirPath.toString(); - File docroot = new File(tmpDir + File.separator + "docroot"); - docroot.mkdirs(); - System.setProperty("com.sun.aas.instanceRoot", tmpDir); - + + // when SiteMapUtil.updateSiteMap(dataverses, datasets); - - String pathToTest = tmpDirPath + File.separator + "docroot" + File.separator + "sitemap"; - String pathToSiteMap = pathToTest + File.separator + "sitemap.xml"; - - Exception wellFormedXmlException = null; - try { - assertTrue(XmlValidator.validateXmlWellFormed(pathToSiteMap)); - } catch (Exception ex) { - System.out.println("Exception caught checking that XML is well formed: " + ex); - wellFormedXmlException = ex; - } - assertNull(wellFormedXmlException); - - Exception notValidAgainstSchemaException = null; - try { - assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); - } catch (MalformedURLException | SAXException ex) { - System.out.println("Exception caught validating XML against the sitemap schema: " + ex); - notValidAgainstSchemaException = ex; - } - assertNull(notValidAgainstSchemaException); + + // then + String pathToSiteMap = tempDocroot.resolve("sitemap").resolve("sitemap.xml").toString(); + assertDoesNotThrow(() -> XmlValidator.validateXmlWellFormed(pathToSiteMap)); + assertTrue(XmlValidator.validateXmlSchema(pathToSiteMap, new URL("https://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd"))); File sitemapFile = new File(pathToSiteMap); String sitemapString = XmlPrinter.prettyPrintXml(new String(Files.readAllBytes(Paths.get(sitemapFile.getAbsolutePath())))); - System.out.println("sitemap: " + sitemapString); + //System.out.println("sitemap: " + sitemapString); assertTrue(sitemapString.contains("1955-11-12")); assertTrue(sitemapString.contains(publishedPid)); @@ -117,8 +117,6 @@ public void testUpdateSiteMap() throws IOException, ParseException { assertFalse(sitemapString.contains(harvestedPid)); assertFalse(sitemapString.contains(deaccessionedPid)); - System.clearProperty("com.sun.aas.instanceRoot"); - } } diff --git a/src/test/resources/META-INF/microprofile-config.properties b/src/test/resources/META-INF/microprofile-config.properties index 21f70b53896..113a098a1fe 100644 --- a/src/test/resources/META-INF/microprofile-config.properties +++ b/src/test/resources/META-INF/microprofile-config.properties @@ -8,4 +8,11 @@ dataverse.pid.ezid.api-url=http://example.org # Also requires the username and the password to be present when used in production, use a default for unit testing. 
dataverse.pid.ezid.username=Dataverse Unit Test -dataverse.pid.ezid.password=supersecret \ No newline at end of file +dataverse.pid.ezid.password=supersecret + +# To test ConfigCheckService, point our files directories to a common test dir by overriding the +# property test.filesDir via system properties +test.filesDir=/tmp/dataverse +dataverse.files.directory=${test.filesDir} +dataverse.files.uploads=${test.filesDir}/uploads +dataverse.files.docroot=${test.filesDir}/docroot
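+#
+# MicroProfile Config resolves the ${test.filesDir} references at lookup time, so a test can
+# redirect all three directories at once by overriding the base property, e.g. (sketch, using
+# a JUnit @TempDir as SiteMapUtilTest does above):
+#   System.setProperty("test.filesDir", tempDir.toString());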