diff --git a/README.md b/README.md index 77720453d5f..2303c001d2c 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,81 @@ Dataverse® =============== -Dataverse is an [open source][] software platform for sharing, finding, citing, and preserving research data (developed by the [Dataverse team](https://dataverse.org/about) at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][]). +![Dataverse-logo](https://github.com/IQSS/dataverse-frontend/assets/7512607/6c4d79e4-7be5-4102-88bd-dfa167dc79d3) -[dataverse.org][] is our home on the web and shows a map of Dataverse installations around the world, a list of [features][], [integrations][] that have been made possible through [REST APIs][], our [project board][], our development [roadmap][], and more. +## Table of Contents -We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse. +1. [❓ What is Dataverse?](#what-is-dataverse) +2. [✔ Try Dataverse](#try-dataverse) +3. [🌐 Features, Integrations, Roadmaps, and More](#website) +4. [📥 Installation](#installation) +5. [🏘 Community and Support](#community-and-support) +6. [🧑‍💻️ Contributing](#contributing) +7. [⚖️ Legal Information](#legal-informations) -To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. Docker users should consult the [Container Guide][]. + -To discuss Dataverse with the community, please join our [mailing list][], participate in a [community call][], chat with us at [chat.dataverse.org][], or attend our annual [Dataverse Community Meeting][]. +## ❓ What is Dataverse? -We love contributors! Please see our [Contributing Guide][] for ways you can help. +Welcome to Dataverse®, the [open source][] software platform designed for sharing, finding, citing, and preserving research data. Developed by the Dataverse team at the [Institute for Quantitative Social Science](https://iq.harvard.edu/) and the [Dataverse community][], our platform makes it easy for research organizations to host, manage, and share their data with the world. + + + +## ✔ Try Dataverse + +We invite you to explore our demo site at [demo.dataverse.org][]. This site is ideal for testing and evaluating Dataverse in a risk-free environment. + + + +## 🌐 Features, Integrations, Roadmaps, and More + +Visit [dataverse.org][], our home on the web, for a comprehensive overview of Dataverse. Here, you will find: + +- An interactive map showcasing Dataverse installations worldwide. +- A detailed list of [features][]. +- Information on [integrations][] that have been made possible through our [REST APIs][]. +- Our [project board][] and development [roadmap][]. +- News, events, and more. + + + +## 📥 Installation + +Ready to get started? Follow our [Installation Guide][] to download and install the latest release of Dataverse. + +If you are using Docker, please refer to our [Container Guide][] for detailed instructions. + + + +## 🏘 Community and Support + +Engage with the vibrant Dataverse community through various channels: + +- **[Mailing List][]**: Join the conversation on our [mailing list][]. +- **[Community Calls][]**: Participate in our regular [community calls][] to discuss new features, ask questions, and share your experiences. +- **[Chat][]**: Connect with us and other users in real-time at [dataverse.zulipchat.com][]. 
+- **[Dataverse Community Meeting][]**: Attend our annual [Dataverse Community Meeting][] to network, learn, and collaborate with peers and experts. +- **[DataverseTV][]**: Watch video content from the Dataverse community on [DataverseTV][] and on [Harvard's IQSS YouTube channel][]. + + +## 🧑‍💻️ Contribute to Dataverse + +We love contributors! Whether you are a developer, researcher, or enthusiast, there are many ways you can help. + +Visit our [Contributing Guide][] to learn how you can get involved. + +Join us in building and enhancing Dataverse to make research data more accessible and impactful. Your support and participation are crucial to our success! + + +## ⚖️ Legal Information Dataverse is a trademark of President and Fellows of Harvard College and is registered in the United States. +--- +For more detailed information, visit our website at [dataverse.org][]. + +Feel free to [reach out] with any questions or feedback. Happy researching! + [![Dataverse Project logo](src/main/webapp/resources/images/dataverseproject_logo.jpg "Dataverse Project")](http://dataverse.org) [![API Test Status](https://jenkins.dataverse.org/buildStatus/icon?job=IQSS-dataverse-develop&subject=API%20Test%20Status)](https://jenkins.dataverse.org/job/IQSS-dataverse-develop/) @@ -37,6 +98,11 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi [Contributing Guide]: CONTRIBUTING.md [mailing list]: https://groups.google.com/group/dataverse-community [community call]: https://dataverse.org/community-calls -[chat.dataverse.org]: https://chat.dataverse.org +[Chat]: https://dataverse.zulipchat.com +[dataverse.zulipchat.com]: https://dataverse.zulipchat.com [Dataverse Community Meeting]: https://dataverse.org/events [open source]: LICENSE.md +[community calls]: https://dataverse.org/community-calls +[DataverseTV]: https://dataverse.org/dataversetv +[Harvard's IQSS YouTube channel]: https://www.youtube.com/@iqssatharvarduniversity8672 +[reach out]: https://dataverse.org/contact diff --git a/doc/release-notes/10171-exlude-metadatablocks.md b/doc/release-notes/10171-exlude-metadatablocks.md new file mode 100644 index 00000000000..7c0a9b030eb --- /dev/null +++ b/doc/release-notes/10171-exlude-metadatablocks.md @@ -0,0 +1,4 @@ +The API calls `{id}/versions` and `{id}/versions/{versionId}` have been extended with an optional ``excludeMetadataBlocks`` parameter +that specifies whether the metadataBlocks should be listed in the output. It defaults to ``false``, preserving backward +compatibility. (Note that for a dataset with a large number of versions and/or metadataBlocks, having the metadata blocks +included can dramatically increase the volume of the output). See also [the guides](https://dataverse-guide--10778.org.readthedocs.build/en/10778/api/native-api.html#list-versions-of-a-dataset), #10778, and #10171. diff --git a/doc/release-notes/10241-new-solr-client.md b/doc/release-notes/10241-new-solr-client.md new file mode 100644 index 00000000000..67ccdd4f184 --- /dev/null +++ b/doc/release-notes/10241-new-solr-client.md @@ -0,0 +1,9 @@ +[HttpSolrClient](https://solr.apache.org/docs/9_4_1/solrj/org/apache/solr/client/solrj/impl/HttpSolrClient.html) is deprecated as of Solr 9 and will be removed in a future major release of Solr. It's recommended to use [Http2SolrClient](https://solr.apache.org/docs/9_4_1/solrj/org/apache/solr/client/solrj/impl/Http2SolrClient.html) instead.
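+
+As an illustration only (not code from this PR; the Solr URL and core name below are placeholders), a minimal SolrJ sketch of the client swap looks like this:
+
+```java
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.impl.Http2SolrClient;
+import org.apache.solr.client.solrj.response.QueryResponse;
+
+public class SolrClientExample {
+    public static void main(String[] args) throws Exception {
+        // Before (deprecated): new HttpSolrClient.Builder(baseUrl).build()
+        // After: Http2SolrClient, the async, non-blocking HTTP/2 client
+        try (Http2SolrClient client = new Http2SolrClient.Builder("http://localhost:8983/solr/collection1").build()) {
+            QueryResponse rsp = client.query(new SolrQuery("*:*"));
+            System.out.println("Found " + rsp.getResults().getNumFound() + " documents");
+        }
+    }
+}
+```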
+ +The [Solr documentation](https://solr.apache.org/guide/solr/latest/deployment-guide/solrj.html#types-of-solrclients) describes it as an _async, non-blocking and general-purpose client that leverages HTTP/2 using the Jetty Http library_. + +In Solr 9.4.1, Http2SolrClient was marked as experimental, but as of Solr 9.6 this designation has been removed. + +ConcurrentUpdateHttp2SolrClient, which should be more efficient for indexing, is now also used in some cases. + +For more information, see issue [#10161](https://github.com/IQSS/dataverse/issues/10161) and pull request [#10241](https://github.com/IQSS/dataverse/pull/10241). diff --git a/doc/release-notes/10340-forbidden.md b/doc/release-notes/10340-forbidden.md new file mode 100644 index 00000000000..5997f717d64 --- /dev/null +++ b/doc/release-notes/10340-forbidden.md @@ -0,0 +1,3 @@ +### Backward Incompatible Changes + +The [Show Role](https://dataverse-guide--11116.org.readthedocs.build/en/11116/api/native-api.html#show-role) API endpoint was returning 401 Unauthorized when a permission check failed. This has been corrected to return 403 Forbidden instead. That is, the API token is known to be good (401 otherwise) but the user lacks permission (403 is now sent). See also the [API Changelog](https://dataverse-guide--11116.org.readthedocs.build/en/11116/api/changelog.html), #10340, and #11116. diff --git a/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md b/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md new file mode 100644 index 00000000000..f0b9c30c9cd --- /dev/null +++ b/doc/release-notes/10472-review-modify-jsonprinter-for-datasetfieldtype.md @@ -0,0 +1,8 @@ +### JSON Printer Bug Fix + +DatasetFieldTypes in the MetadataBlock response that are also children of another DatasetFieldType were being returned twice. The child DatasetFieldType was included in the "fields" object as well as in the "childFields" of its parent DatasetFieldType. This fix suppresses the standalone object so only one instance of the DatasetFieldType is returned (in the "childFields" of its parent). +This fix changes the JSON output of the API `/api/dataverses/{dataverseAlias}/metadatablocks`. + +### Backward Incompatible Changes + +The JSON response of the API call `/api/dataverses/{dataverseAlias}/metadatablocks` will no longer include the DatasetFieldTypes in "fields" if they are children of another DatasetFieldType. The child DatasetFieldType will only be included in the "childFields" of its parent DatasetFieldType. diff --git a/doc/release-notes/10516_legacy_permalink_config_fix.md b/doc/release-notes/10516_legacy_permalink_config_fix.md new file mode 100644 index 00000000000..d78395252d4 --- /dev/null +++ b/doc/release-notes/10516_legacy_permalink_config_fix.md @@ -0,0 +1 @@ +Support for legacy configuration of a PermaLink PID provider, e.g. using the :Protocol, :Authority, and :Shoulder settings, is fixed.
\ No newline at end of file diff --git a/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md b/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md new file mode 100644 index 00000000000..a220c150791 --- /dev/null +++ b/doc/release-notes/10714-access-requests-missing-since-upgrade-v6-0.md @@ -0,0 +1,6 @@ +### Flyway Script Added to Fix File Access Requests When Upgrading from Dataverse 6.0 + +A database update script has been added to prevent duplicate keys when upgrading from v6.0. +This script will delete access requests made after the initial request and will set the initial request to "Created". + +See: https://github.com/IQSS/dataverse/issues/10714 diff --git a/doc/release-notes/11038-unconsidered-harvesting-granularity.md b/doc/release-notes/11038-unconsidered-harvesting-granularity.md new file mode 100644 index 00000000000..72ebd522831 --- /dev/null +++ b/doc/release-notes/11038-unconsidered-harvesting-granularity.md @@ -0,0 +1,2 @@ +Bug Fix: +OAI Client harvesting now uses the correct granularity when re-running a partial harvest (using the `from` parameter). The correct granularity comes from the `Identify` verb request. \ No newline at end of file diff --git a/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md b/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md new file mode 100644 index 00000000000..230d69c9b9f --- /dev/null +++ b/doc/release-notes/11083-mydata-npe-with-harvested-dataverses.md @@ -0,0 +1 @@ +Fixed a bug with My Data where listing dataverses for a user with rights only on harvested dataverses would result in a server error response. \ No newline at end of file diff --git a/doc/release-notes/11095-fix-extcvoc-indexing.md b/doc/release-notes/11095-fix-extcvoc-indexing.md new file mode 100644 index 00000000000..f4931d81263 --- /dev/null +++ b/doc/release-notes/11095-fix-extcvoc-indexing.md @@ -0,0 +1,7 @@ +Some External Controlled Vocabulary scripts/configurations, when used on a metadata field that is single-valued, could result +in indexing failure for the dataset (e.g. when the script tried to index both the identifier and the name of the identified entity). +Dataverse has been updated to correctly indicate the need for a multi-valued Solr field in these cases in the call to /api/admin/index/solr/schema. +Configuring the Solr schema with the update-fields.sh script, as usually recommended when using custom metadata blocks, will resolve the issue. + +The overall release notes should include a Solr update (which hopefully is required by an update to 9.7.0 anyway) and our standard instructions +should change to recommend using the update-fields.sh script when using custom metadatablocks *and/or external vocabulary scripts*. diff --git a/doc/release-notes/11107-fake-to-perma-demo.md b/doc/release-notes/11107-fake-to-perma-demo.md new file mode 100644 index 00000000000..afb6b8a7917 --- /dev/null +++ b/doc/release-notes/11107-fake-to-perma-demo.md @@ -0,0 +1,3 @@ +### Demo/Eval Container Tutorial + +The demo/eval container tutorial has been updated to use the Permalink PID provider instead of the FAKE DOI Provider. See also #11107.
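+
+For reference, the PID provider settings this PR puts in the demo ``compose.yml`` (shown in full later in this diff) are:
+
+```
+-Ddataverse.pid.providers=perma1
+-Ddataverse.pid.default-provider=perma1
+-Ddataverse.pid.perma1.type=perma
+-Ddataverse.pid.perma1.label=Perma1
+-Ddataverse.pid.perma1.authority=DV
+-Ddataverse.pid.perma1.permalink.separator=/
+```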
diff --git a/doc/release-notes/11113-avoid-orphan-perm-docs.md b/doc/release-notes/11113-avoid-orphan-perm-docs.md new file mode 100644 index 00000000000..4c52d72d7db --- /dev/null +++ b/doc/release-notes/11113-avoid-orphan-perm-docs.md @@ -0,0 +1,5 @@ +This release fixes a bug that caused Dataverse to generate unnecessary solr documents for files when a file is added/deleted from a draft dataset. These documents could accumulate and potentially impact performance. + +Assuming the upgrade to solr 9.7.0 also occurs in this release, there's nothing else needed for this PR. (Starting with a new solr ensures that the solr db is empty and that a reindex is already required.) + + diff --git a/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md b/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md new file mode 100644 index 00000000000..e71a67142b3 --- /dev/null +++ b/doc/release-notes/11130-update-dataverse-api-remove-metadatablocks.md @@ -0,0 +1,8 @@ +### Fix for omitting optional fields in the update dataverse API + +Previously, omitting the optional fields inputLevels, facetIds, or metadataBlockNames caused their data to be deleted. +With this fix, data for these fields is no longer deleted. Two new flags have been added to the ``metadataBlocks`` JSON object to signal the deletion of the data. +- ``inheritMetadataBlocksFromParent: true`` will remove ``metadataBlockNames`` and ``inputLevels`` if the JSON objects are omitted. +- ``inheritFacetsFromParent: true`` will remove ``facetIds`` if the JSON object is omitted. + +For more information, see issue [#11130](https://github.com/IQSS/dataverse/issues/11130) diff --git a/doc/release-notes/11133-search-fix.md b/doc/release-notes/11133-search-fix.md new file mode 100644 index 00000000000..88962b70ea0 --- /dev/null +++ b/doc/release-notes/11133-search-fix.md @@ -0,0 +1,3 @@ +### Search fix when using AVOID_EXPENSIVE_SOLR_JOIN=true + +Dataverse v6.5 introduced a bug that causes search to fail for non-superusers in multiple groups when the AVOID_EXPENSIVE_SOLR_JOIN feature flag is set to true. This release fixes the bug. diff --git a/doc/release-notes/8808-10575-update-global-role.md b/doc/release-notes/8808-10575-update-global-role.md new file mode 100644 index 00000000000..38142f972e8 --- /dev/null +++ b/doc/release-notes/8808-10575-update-global-role.md @@ -0,0 +1,11 @@ +## Release Highlights + +### Update a Global Role + +A new API endpoint has been added that allows a global role to be updated.
See [Native API Guide > Update Global Role](https://guides.dataverse.org/en/6.3/api/native-api.html#update-global-role) (#10612) + +## Bug fixes + +### Editing of custom roles fixed + +It is now possible to edit a custom role while keeping the same alias (reported in #8808). \ No newline at end of file diff --git a/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md b/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md new file mode 100644 index 00000000000..f9fc465292c --- /dev/null +++ b/doc/release-notes/9294-improvement-and-internationalization-of-harvest-status.md @@ -0,0 +1,6 @@ +## Improvement and internationalization of harvest status + +Added a harvest status to differentiate a completed harvest with errors ("Completed with failures") from one without errors ("Completed"). +Harvest status labels are now internationalized. + +For more information, see issue [#9294](https://github.com/IQSS/dataverse/issues/9294) \ No newline at end of file diff --git a/doc/sphinx-guides/source/admin/metadatacustomization.rst b/doc/sphinx-guides/source/admin/metadatacustomization.rst index 4c9dc693a0d..ae832daa0ee 100644 --- a/doc/sphinx-guides/source/admin/metadatacustomization.rst +++ b/doc/sphinx-guides/source/admin/metadatacustomization.rst @@ -564,8 +564,7 @@ Using External Vocabulary Services The Dataverse software has a mechanism to associate specific fields defined in metadata blocks with a vocabulary(ies) managed by external services. The mechanism relies on trusted third-party Javascripts. The mapping from field type to external vocabulary(ies) is managed via the :ref:`:CVocConf <:CVocConf>` setting. -*This functionality is considered 'experimental'. It may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.* - +*This functionality may require significant effort to configure and is likely to evolve in subsequent Dataverse software releases.* The effect of configuring this mechanism is similar to that of defining a field in a metadata block with 'allowControlledVocabulary=true': @@ -590,6 +589,9 @@ Configuration involves specifying which fields are to be mapped, to which Solr f These are all defined in the :ref:`:CVocConf <:CVocConf>` setting as a JSON array. Details about the required elements as well as example JSON arrays are available at https://github.com/gdcc/dataverse-external-vocab-support, along with an example metadata block that can be used for testing. The scripts required can be hosted locally or retrieved dynamically from https://gdcc.github.io/ (similar to how dataverse-previewers work). +Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages), +updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration. + Please note that in addition to the :ref:`:CVocConf` described above, an alternative is the :ref:`:ControlledVocabularyCustomJavaScript` setting. Protecting MetadataBlocks diff --git a/doc/sphinx-guides/source/api/changelog.rst b/doc/sphinx-guides/source/api/changelog.rst index 162574e7799..5ae152aeace 100644 --- a/doc/sphinx-guides/source/api/changelog.rst +++ b/doc/sphinx-guides/source/api/changelog.rst @@ -11,6 +11,7 @@ v6.6 ---- - **/api/metadatablocks** is no longer returning duplicated metadata properties and does not omit metadata properties when called.
+- **/api/roles**: :ref:`show-role` now properly returns 403 Forbidden instead of 401 Unauthorized when you pass a working API token that doesn't have the right permission. v6.5 ---- diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index cf088963b7d..7cd84565cbd 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -128,12 +128,23 @@ Note that setting any of these fields overwrites the previous configuration. When it comes to omitting these fields in the JSON: -- Omitting ``facetIds`` or ``metadataBlockNames`` causes the Dataverse collection to inherit the corresponding configuration from its parent. -- Omitting ``inputLevels`` removes any existing custom input levels in the Dataverse collection. -- Omitting the entire ``metadataBlocks`` object in the request JSON would exclude the three sub-objects, resulting in the application of the two changes described above. +- Omitting ``facetIds`` or ``metadataBlockNames`` causes no change to the Dataverse collection. To delete the current configuration and inherit the corresponding configuration from its parent, include the flag ``inheritFacetsFromParent`` and/or ``inheritMetadataBlocksFromParent``, respectively. +- Omitting ``inputLevels`` causes no change to the Dataverse collection. Including the flag ``inheritMetadataBlocksFromParent`` will cause the custom ``inputLevels`` to be deleted and inherited from the parent. +- Omitting the entire ``metadataBlocks`` object in the request JSON causes no change to the ``inputLevels``, ``facetIds``, or ``metadataBlockNames`` of the Dataverse collection. To obtain an example of how these objects are included in the JSON file, download the :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +To force the configurations to be deleted and inherited from the parent's configuration, include the following ``metadataBlocks`` object in your JSON: + +.. code-block:: json + + "metadataBlocks": { + "inheritMetadataBlocksFromParent": true, + "inheritFacetsFromParent": true + } + +.. note:: Including both the list ``metadataBlockNames`` and the flag ``"inheritMetadataBlocksFromParent": true`` will result in an error: ``{"status": "ERROR", "message": "Metadata block can not contain both metadataBlockNames and inheritMetadataBlocksFromParent: true"}``. The same is true for ``facetIds`` and ``inheritFacetsFromParent``. + See also :ref:`collection-attributes-api`. .. _view-dataverse: @@ -424,13 +435,13 @@ Creates a new role under Dataverse collection ``id``. Needs a json file with the export SERVER_URL=https://demo.dataverse.org export ID=root - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json + curl -H "X-Dataverse-key:$API_TOKEN" -H "Content-type:application/json" -X POST "$SERVER_URL/api/dataverses/$ID/roles" --upload-file roles.json The fully expanded example above (without environment variables) looks like this: ..
code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X POST -H "Content-type:application/json" "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -H "Content-type:application/json" -X POST "https://demo.dataverse.org/api/dataverses/root/roles" --upload-file roles.json For ``roles.json`` see :ref:`json-representation-of-a-role` @@ -1295,6 +1306,8 @@ It returns a list of versions with their metadata, and file list: The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output. It defaults to ``true``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or files having the files included can dramatically increase the volume of the output). A separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output. It defaults to ``false``, preserving backward compatibility. (Note that for a dataset with a large number of versions and/or metadata blocks, having the metadata blocks included can dramatically increase the volume of the output). + The optional ``offset`` and ``limit`` parameters can be used to specify the range of the versions list to be shown. This can be used to paginate through the list in a dataset with a large number of versions. @@ -1319,6 +1332,12 @@ The fully expanded example above (without environment variables) looks like this The optional ``excludeFiles`` parameter specifies whether the files should be listed in the output (defaults to ``true``). Note that a separate ``/files`` API can be used for listing the files, or a subset thereof in a given version. +.. code-block:: bash + + curl "https://demo.dataverse.org/api/datasets/24/versions/1.0?excludeMetadataBlocks=false" + +The optional ``excludeMetadataBlocks`` parameter specifies whether the metadata blocks should be listed in the output (defaults to ``false``). + By default, deaccessioned dataset versions are not included in the search when applying the :latest or :latest-published identifiers. Additionally, when filtering by a specific version tag, you will get a "not found" error if the version is deaccessioned and you do not enable the ``includeDeaccessioned`` option described below. @@ -4550,12 +4569,12 @@ The JSON representation of a role (``roles.json``) looks like this:: { "alias": "sys1", - "name": “Restricted System Role”, - "description": “A person who may only add datasets.”, + "name": "Restricted System Role", + "description": "A person who may only add datasets.", "permissions": [ "AddDataset" ] - } + } .. note:: alias is constrained to a length of 16 characters @@ -4564,17 +4583,49 @@ Create Role ~~~~~~~~~~~ Roles can be created globally (:ref:`create-global-role`) or for individual Dataverse collections (:ref:`create-role-in-collection`). +.. _show-role: + Show Role ~~~~~~~~~ -Shows the role with ``id``:: +You must have ``ManageDataversePermissions`` to be able to show a role that was created using :ref:`create-role-in-collection`. Global roles (:ref:`create-global-role`) can only be shown with a superuser API token. + +An example using a role alias: + +..
code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ALIAS=sys1 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/:alias?alias=$ALIAS" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/:alias?alias=sys1" + +An example using a role id: + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export ID=11 + + curl -H "X-Dataverse-key:$API_TOKEN" "$SERVER_URL/api/roles/$ID" + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash - GET http://$SERVER/api/roles/$id + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" "https://demo.dataverse.org/api/roles/11" Delete Role ~~~~~~~~~~~ -A curl example using an ``ID`` +An example using a role id: .. code-block:: bash @@ -4590,13 +4641,13 @@ The fully expanded example above (without environment variables) looks like this curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/24" -A curl example using a Role alias ``ALIAS`` +An example using a role alias: .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx export SERVER_URL=https://demo.dataverse.org - export ALIAS=roleAlias + export ALIAS=sys1 curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/roles/:alias?alias=$ALIAS" @@ -4604,8 +4655,7 @@ The fully expanded example above (without environment variables) looks like this .. code-block:: bash - curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=roleAlias" - + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X DELETE "https://demo.dataverse.org/api/roles/:alias?alias=sys1" Explicit Groups --------------- @@ -5713,22 +5763,43 @@ Creates a global role in the Dataverse installation. The data POSTed are assumed .. code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export SERVER_URL=https://demo.dataverse.org - export ID=root + export SERVER_URL=http://localhost:8080 - curl -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json + curl -H "Content-Type: application/json" -H "X-Dataverse-key:$API_TOKEN" -X POST "$SERVER_URL/api/admin/roles" --upload-file roles.json + +For ``roles.json`` see :ref:`json-representation-of-a-role` + +Update Global Role +~~~~~~~~~~~~~~~~~~ + +Update a global role in the Dataverse installation. The PUTed data is assumed to be a complete JSON role, as it will overwrite the existing role. :: + + PUT http://$SERVER/api/admin/roles/$ID + +A curl example using an ``ID`` + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=http://localhost:8080 + export ID=24 + + curl -H "Content-Type: application/json" -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/admin/roles/$ID" --upload-file roles.json ``roles.json`` see :ref:`json-representation-of-a-role` Delete Global Role ~~~~~~~~~~~~~~~~~~ +Deletes a ``DataverseRole`` whose ``id`` is passed. :: + + DELETE http://$SERVER/api/admin/roles/$ID + A curl example using an ``ID`` ..
code-block:: bash export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx - export SERVER_URL=https://demo.dataverse.org + export SERVER_URL=http://localhost:8080 export ID=24 curl -H "X-Dataverse-key:$API_TOKEN" -X DELETE "$SERVER_URL/api/admin/roles/$ID" @@ -6617,6 +6688,8 @@ MyData The MyData API is used to get a list of just the datasets, dataverses or datafiles an authenticated user can edit. +The API excludes dataverses linked to a harvesting client. This results in `a known issue `_ where regular datasets in harvesting dataverses are missing from the results. + A curl example listing objects .. code-block:: bash diff --git a/doc/sphinx-guides/source/container/running/demo.rst b/doc/sphinx-guides/source/container/running/demo.rst index b1945070714..2483d3217a5 100644 --- a/doc/sphinx-guides/source/container/running/demo.rst +++ b/doc/sphinx-guides/source/container/running/demo.rst @@ -160,6 +160,11 @@ Next, set up the UI toggle between English and French, again using the unblock k Stop and start the Dataverse container in order for the language toggle to work. +PID Providers ++++++++++++++ + +Dataverse supports multiple Persistent ID (PID) providers. The ``compose.yml`` file uses the Permalink PID provider. Follow :ref:`pids-configuration` to reconfigure as needed. + Next Steps ---------- diff --git a/doc/sphinx-guides/source/installation/config.rst b/doc/sphinx-guides/source/installation/config.rst index 111356b9a70..f3b42b74aa3 100644 --- a/doc/sphinx-guides/source/installation/config.rst +++ b/doc/sphinx-guides/source/installation/config.rst @@ -3356,9 +3356,6 @@ please find all known feature flags below. Any of these flags can be activated u * - reduce-solr-deletes - Avoids deleting and recreating solr documents for dataset files when reindexing. - ``Off`` - * - reduce-solr-deletes - - Avoids deleting and recreating solr documents for dataset files when reindexing. - - ``Off`` * - disable-return-to-author-reason - Removes the reason field in the `Publish/Return To Author` dialog that was added as a required field in v6.2 and makes the reason an optional parameter in the :ref:`return-a-dataset` API call. - ``Off`` @@ -4657,6 +4654,9 @@ The commands below should give you an idea of how to load the configuration, but ``curl -X PUT --upload-file cvoc-conf.json http://localhost:8080/api/admin/settings/:CVocConf`` +Since external vocabulary scripts can change how fields are indexed (storing an identifier and name and/or values in different languages), +updating the Solr schema as described in :ref:`update-solr-schema` should be done after adding new scripts to your configuration. + ..
_:ControlledVocabularyCustomJavaScript: :ControlledVocabularyCustomJavaScript diff --git a/docker/compose/demo/compose.yml b/docker/compose/demo/compose.yml index 60ed130612e..bc0fe9825ba 100644 --- a/docker/compose/demo/compose.yml +++ b/docker/compose/demo/compose.yml @@ -20,12 +20,12 @@ services: -Ddataverse.files.file1.type=file -Ddataverse.files.file1.label=Filesystem -Ddataverse.files.file1.directory=${STORAGE_DIR}/store - -Ddataverse.pid.providers=fake - -Ddataverse.pid.default-provider=fake - -Ddataverse.pid.fake.type=FAKE - -Ddataverse.pid.fake.label=FakeDOIProvider - -Ddataverse.pid.fake.authority=10.5072 - -Ddataverse.pid.fake.shoulder=FK2/ + -Ddataverse.pid.providers=perma1 + -Ddataverse.pid.default-provider=perma1 + -Ddataverse.pid.perma1.type=perma + -Ddataverse.pid.perma1.label=Perma1 + -Ddataverse.pid.perma1.authority=DV + -Ddataverse.pid.perma1.permalink.separator=/ #-Ddataverse.lang.directory=/dv/lang ports: - "8080:8080" # HTTP (Dataverse Application) diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java index 1a610d9ea6e..01c1a48e117 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java @@ -1142,4 +1142,12 @@ public boolean isDeaccessioned() { } return inDeaccessionedVersions; // since any published version would have already returned } + public boolean isInDatasetVersion(DatasetVersion version) { + for (FileMetadata fmd : getFileMetadatas()) { + if (fmd.getDatasetVersion().equals(version)) { + return true; + } + } + return false; + } } // end of class diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 33a093c8044..570a22a1bd1 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -163,7 +163,7 @@ import edu.harvard.iq.dataverse.util.FileMetadataUtil; import java.util.Comparator; import org.apache.solr.client.solrj.SolrQuery; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteSolrException; import org.apache.solr.client.solrj.response.FacetField; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.common.SolrDocument; @@ -1041,7 +1041,7 @@ public Set<Long> getFileIdsInVersionFromSolr(Long datasetVersionId, String patte try { queryResponse = solrClientService.getSolrClient().query(solrQuery); - } catch (HttpSolrClient.RemoteSolrException ex) { + } catch (RemoteSolrException ex) { logger.fine("Remote Solr Exception: " + ex.getLocalizedMessage()); String msg = ex.getLocalizedMessage(); if (msg.contains(SearchFields.FILE_DELETED)) { diff --git a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java index 78d5eaf3414..b751841da74 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/DataverseRoleServiceBean.java @@ -23,7 +23,6 @@ import jakarta.persistence.EntityManager; import jakarta.persistence.PersistenceContext; import jakarta.persistence.TypedQuery; -//import jakarta.validation.constraints.NotNull; /** * @@ -40,6 +39,9 @@ public class DataverseRoleServiceBean implements java.io.Serializable { @EJB RoleAssigneeServiceBean roleAssigneeService; + + @EJB + DataverseServiceBean dataverseService; @EJB IndexServiceBean
indexService; @EJB @@ -48,22 +50,23 @@ public class DataverseRoleServiceBean implements java.io.Serializable { IndexAsync indexAsync; public DataverseRole save(DataverseRole aRole) { - if (aRole.getId() == null) { + if (aRole.getId() == null) { // persist a new Role em.persist(aRole); - /** - * @todo Why would getId be null? Should we call - * indexDefinitionPoint here too? A: it's null for new roles. - */ - return aRole; - } else { - DataverseRole merged = em.merge(aRole); - /** - * @todo update permissionModificationTime here. - */ - IndexResponse indexDefinitionPountResult = indexDefinitionPoint(merged.getOwner()); - logger.info("aRole getId was not null. Indexing result: " + indexDefinitionPountResult); - return merged; + } else { // update an existing Role + aRole = em.merge(aRole); + } + + DvObject owner = aRole.getOwner(); + if(owner == null) { // Builtin Role + owner = dataverseService.findByAlias("root"); + } + + if(owner != null) { // owner may be null if a role is created before the root collection as in setup-all.sh + IndexResponse indexDefinitionPointResult = indexDefinitionPoint(owner); + logger.info("Indexing result: " + indexDefinitionPointResult); } + + return aRole; } public RoleAssignment save(RoleAssignment assignment) { diff --git a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java index 3c1074b75bb..8a88ff042ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/AbstractApiBean.java @@ -831,6 +831,18 @@ protected Response badRequest(String msg, Map fieldErrors) { .build(); } + /** + * In short, your password is fine but you don't have permission. + * + * "The 403 (Forbidden) status code indicates that the server understood the + * request but refuses to authorize it. A server that wishes to make public + * why the request has been forbidden can describe that reason in the + * response payload (if any). + * + * If authentication credentials were provided in the request, the server + * considers them insufficient to grant access." -- + * https://datatracker.ietf.org/doc/html/rfc7231#section-6.5.3 + */ protected Response forbidden( String msg ) { return error( Status.FORBIDDEN, msg ); } @@ -852,9 +864,17 @@ protected Response permissionError( PermissionException pe ) { } protected Response permissionError( String message ) { - return unauthorized( message ); + return forbidden( message ); } + /** + * In short, bad password. + * + * "The 401 (Unauthorized) status code indicates that the request has not + * been applied because it lacks valid authentication credentials for the + * target resource." 
-- + * https://datatracker.ietf.org/doc/html/rfc7235#section-3.1 + */ protected Response unauthorized( String message ) { return error( Status.UNAUTHORIZED, message ); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java index 152bcf5066e..2d850cc092f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java @@ -1,11 +1,30 @@ package edu.harvard.iq.dataverse.api; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.BannerMessage; +import edu.harvard.iq.dataverse.BannerMessageServiceBean; +import edu.harvard.iq.dataverse.BannerMessageText; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersion; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseRequestServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DataverseSession; +import edu.harvard.iq.dataverse.DvObject; +import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.settings.JvmSettings; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import edu.harvard.iq.dataverse.validation.EMailValidator; +import edu.harvard.iq.dataverse.EjbDataverseEngine; +import edu.harvard.iq.dataverse.Template; +import edu.harvard.iq.dataverse.TemplateServiceBean; +import edu.harvard.iq.dataverse.UserServiceBean; import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord; import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.authorization.AuthenticatedUserDisplayInfo; @@ -49,7 +68,8 @@ import java.io.InputStream; import java.io.StringReader; import java.nio.charset.StandardCharsets; -import java.util.*; +import java.util.Collections; +import java.util.Map; import java.util.Map.Entry; import java.util.function.Predicate; import java.util.logging.Level; @@ -65,6 +85,7 @@ import org.apache.commons.io.IOUtils; +import java.util.List; import edu.harvard.iq.dataverse.authorization.AuthTestDataServiceBean; import edu.harvard.iq.dataverse.authorization.AuthenticationProvidersRegistrationServiceBean; import edu.harvard.iq.dataverse.authorization.DataverseRole; @@ -101,7 +122,9 @@ import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.rolesToJson; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.toJsonArray; - +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; import jakarta.inject.Inject; import jakarta.json.JsonArray; import jakarta.persistence.Query; @@ -109,6 +132,7 @@ import jakarta.ws.rs.WebApplicationException; import jakarta.ws.rs.core.StreamingOutput; import java.nio.file.Paths; +import java.util.TreeMap; /** * Where the secure, setup API calls live. 
@@ -990,6 +1014,22 @@ public Response createNewBuiltinRole(RoleDTO roleDto) { actionLogSvc.log(alr); } } + @Path("roles/{id}") + @PUT + public Response updateBuiltinRole(RoleDTO roleDto, @PathParam("id") long roleId) { + ActionLogRecord alr = new ActionLogRecord(ActionLogRecord.ActionType.Admin, "updateBuiltInRole") + .setInfo(roleDto.getAlias() + ":" + roleDto.getDescription()); + try { + DataverseRole role = roleDto.updateRoleFromDTO(rolesSvc.find(roleId)); + return ok(json(rolesSvc.save(role))); + } catch (Exception e) { + alr.setActionResult(ActionLogRecord.Result.InternalError); + alr.setInfo(alr.getInfo() + "// " + e.getMessage()); + return error(Response.Status.INTERNAL_SERVER_ERROR, e.getMessage()); + } finally { + actionLogSvc.log(alr); + } + } @Path("roles") @GET diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java index 2ec10816acc..0fe9099a3e4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java @@ -421,15 +421,16 @@ public Response useDefaultCitationDate(@Context ContainerRequestContext crc, @Pa @GET @AuthRequired @Path("{id}/versions") - public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { + public Response listVersions(@Context ContainerRequestContext crc, @PathParam("id") String id, @QueryParam("excludeFiles") Boolean excludeFiles, @QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks, @QueryParam("limit") Integer limit, @QueryParam("offset") Integer offset) { return response( req -> { Dataset dataset = findDatasetOrDie(id); Boolean deepLookup = excludeFiles == null ? true : !excludeFiles; + Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks; return ok( execCommand( new ListVersionsCommand(req, dataset, offset, limit, deepLookup) ) .stream() - .map( d -> json(d, deepLookup) ) + .map( d -> json(d, deepLookup, includeMetadataBlocks) ) .collect(toJsonArray())); }, getRequestUser(crc)); } @@ -441,6 +442,7 @@ public Response getVersion(@Context ContainerRequestContext crc, @PathParam("id") String datasetId, @PathParam("versionId") String versionId, @QueryParam("excludeFiles") Boolean excludeFiles, + @QueryParam("excludeMetadataBlocks") Boolean excludeMetadataBlocks, @QueryParam("includeDeaccessioned") boolean includeDeaccessioned, @QueryParam("returnOwners") boolean returnOwners, @Context UriInfo uriInfo, @@ -466,11 +468,12 @@ public Response getVersion(@Context ContainerRequestContext crc, if (excludeFiles == null ? true : !excludeFiles) { requestedDatasetVersion = datasetversionService.findDeep(requestedDatasetVersion.getId()); } + Boolean includeMetadataBlocks = excludeMetadataBlocks == null ? true : !excludeMetadataBlocks; JsonObjectBuilder jsonBuilder = json(requestedDatasetVersion, null, - excludeFiles == null ? true : !excludeFiles, - returnOwners); + excludeFiles == null ?
true : !excludeFiles, + returnOwners, includeMetadataBlocks); return ok(jsonBuilder); }, getRequestUser(crc)); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index c494a5c9ccd..f406f6078ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import com.google.common.collect.Lists; import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; @@ -195,7 +196,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List<DatasetFieldType> facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO, true)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); return ok(json(dataverse)); } catch (WrappedResponse ww) { @@ -221,31 +222,60 @@ private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) thr } } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List<DataverseFieldTypeInputLevel> parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray inputLevelsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("inputLevels") : null; + + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + return Lists.newArrayList(); // delete } - JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); - return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + return parseInputLevels(inputLevelsArray, dataverse); } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List<MetadataBlock> parseMetadataBlocks(String body) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray metadataBlocksArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("metadataBlockNames") : null; + + if (metadataBlocksArray != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "metadataBlockNames", "inheritMetadataBlocksFromParent"); + throw new WrappedResponse(badRequest(errorMessage)); + } + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritMetadataBlocksFromParent") && metadataBlocksJson.getBoolean("inheritMetadataBlocksFromParent")) { + return Lists.newArrayList(); // delete and inherit from parent } - JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - return metadataBlocksArray != null ?
parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + + return parseNewDataverseMetadataBlocks(metadataBlocksArray); } + /* + return null - ignore + return empty list - delete and inherit from parent + return non-empty list - update + */ private List<DatasetFieldType> parseFacets(String body) throws WrappedResponse { JsonObject metadataBlocksJson = getMetadataBlocksJson(body); - if (metadataBlocksJson == null) { - return null; + JsonArray facetsArray = metadataBlocksJson != null ? metadataBlocksJson.getJsonArray("facetIds") : null; + + if (facetsArray != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) { + String errorMessage = MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "facetIds", "inheritFacetsFromParent"); + throw new WrappedResponse(badRequest(errorMessage)); + } + + if (metadataBlocksJson != null && metadataBlocksJson.containsKey("inheritFacetsFromParent") && metadataBlocksJson.getBoolean("inheritFacetsFromParent")) { + return Lists.newArrayList(); // delete and inherit from parent } - JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); - return facetsArray != null ? parseFacets(facetsArray) : null; + + return parseFacets(facetsArray); } private JsonObject getMetadataBlocksJson(String body) { @@ -277,6 +307,9 @@ private Response handleEJBException(EJBException ex, String action) { } private List<MetadataBlock> parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { + if (metadataBlockNamesArray == null) { + return null; + } List<MetadataBlock> selectedMetadataBlocks = new ArrayList<>(); for (JsonString metadataBlockName : metadataBlockNamesArray.getValuesAs(JsonString.class)) { MetadataBlock metadataBlock = metadataBlockSvc.findByName(metadataBlockName.getString()); @@ -745,6 +778,9 @@ public Response updateInputLevels(@Context ContainerRequestContext crc, @PathPar } private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevelsArray, Dataverse dataverse) throws WrappedResponse { + if (inputLevelsArray == null) { + return null; + } List<DataverseFieldTypeInputLevel> newInputLevels = new ArrayList<>(); for (JsonValue value : inputLevelsArray) { JsonObject inputLevel = (JsonObject) value; @@ -771,6 +807,9 @@ private List<DataverseFieldTypeInputLevel> parseInputLevels(JsonArray inputLevel } private List<DatasetFieldType> parseFacets(JsonArray facetsArray) throws WrappedResponse { + if (facetsArray == null) { + return null; + } List<DatasetFieldType> facets = new LinkedList<>(); for (JsonString facetId : facetsArray.getValuesAs(JsonString.class)) { DatasetFieldType dsfType = findDatasetFieldType(facetId.getString()); diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Index.java b/src/main/java/edu/harvard/iq/dataverse/api/Index.java index c30a77acb58..bc9a8ae692b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Index.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Index.java @@ -44,6 +44,7 @@ import java.lang.reflect.Field; import java.util.ArrayList; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.Future; @@ -451,11 +452,11 @@ public Response clearOrphans(@QueryParam("sync") String sync) { public String getSolrSchema() { StringBuilder sb = new StringBuilder(); - - for (DatasetFieldType datasetField : datasetFieldService.findAllOrderedByName()) { + Map<Long, JsonObject> cvocTermUriMap = datasetFieldSvc.getCVocConf(true); + for (DatasetFieldType datasetFieldType : datasetFieldService.findAllOrderedByName()) { //ToDo - getSolrField()
creates/returns a new object - just get it once and re-use - String nameSearchable = datasetField.getSolrField().getNameSearchable(); - SolrField.SolrType solrType = datasetField.getSolrField().getSolrType(); + String nameSearchable = datasetFieldType.getSolrField().getNameSearchable(); + SolrField.SolrType solrType = datasetFieldType.getSolrField().getSolrType(); String type = solrType.getType(); if (solrType.equals(SolrField.SolrType.EMAIL)) { /** @@ -474,7 +475,7 @@ public String getSolrSchema() { */ logger.info("email type detected (" + nameSearchable + ") See also https://github.com/IQSS/dataverse/issues/759"); } - String multivalued = datasetField.getSolrField().isAllowedToBeMultivalued().toString(); + String multivalued = Boolean.toString(datasetFieldType.getSolrField().isAllowedToBeMultivalued() || cvocTermUriMap.containsKey(datasetFieldType.getId())); // <field name="..." type="..." multiValued="..." stored="true" indexed="true"/> sb.append("    <field name=\"" + nameSearchable + "\" type=\"" + type + "\" multiValued=\"" + multivalued + "\" stored=\"true\" indexed=\"true\"/>\n"); } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java index 58e30ade584..5769ab430ad 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/RoleDTO.java @@ -47,11 +47,11 @@ public void setPermissions(String[] permissions) { this.permissions = permissions; } - public DataverseRole asRole() { - DataverseRole r = new DataverseRole(); + public DataverseRole updateRoleFromDTO(DataverseRole r) { r.setAlias(alias); r.setDescription(description); r.setName(name); + r.clearPermissions(); if (permissions != null) { if (permissions.length > 0) { if (permissions[0].trim().toLowerCase().equals("all")) { @@ -65,5 +65,9 @@ public DataverseRole asRole() { } return r; } + + public DataverseRole asRole() { + return updateRoleFromDTO(new DataverseRole()); + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 91f3a5b823c..2a72485d821 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -19,15 +19,13 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand<Dataverse> private final List<DataverseFieldTypeInputLevel> inputLevels; private final List<DatasetFieldType> facets; protected final List<MetadataBlock> metadataBlocks; - private final boolean resetRelationsOnNullValues; public AbstractWriteDataverseCommand(Dataverse dataverse, Dataverse affectedDataverse, DataverseRequest request, List<DatasetFieldType> facets, List<DataverseFieldTypeInputLevel> inputLevels, - List<MetadataBlock> metadataBlocks, - boolean resetRelationsOnNullValues) { + List<MetadataBlock> metadataBlocks) { super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { @@ -45,7 +43,6 @@ public AbstractWriteDataverseCommand(Dataverse dataverse, } else { this.metadataBlocks = null; } - this.resetRelationsOnNullValues = resetRelationsOnNullValues; } @Override @@ -59,46 +56,61 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { return ctxt.dataverses().save(dataverse); } + /* + metadataBlocks = null - ignore + metadataBlocks is empty - delete and inherit from parent + metadataBlocks is not empty - set with new updated values + */ private void processMetadataBlocks() { - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - dataverse.setMetadataBlockRoot(true); - dataverse.setMetadataBlocks(metadataBlocks); - } else if (resetRelationsOnNullValues) { -
dataverse.setMetadataBlockRoot(false); - dataverse.clearMetadataBlocks(); + if (metadataBlocks != null) { + if (metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(false); + dataverse.clearMetadataBlocks(); + } else { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } } } + /* + facets = null - ignore + facets is empty - delete and inherit from parent + facets is not empty - set with new updated values + */ private void processFacets(CommandContext ctxt) { if (facets != null) { - ctxt.facets().deleteFacetsFor(dataverse); - dataverse.setDataverseFacets(new ArrayList<>()); - - if (!facets.isEmpty()) { + if (facets.isEmpty()) { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setFacetRoot(false); + } else { + ctxt.facets().deleteFacetsFor(dataverse); + dataverse.setDataverseFacets(new ArrayList<>()); dataverse.setFacetRoot(true); + for (int i = 0; i < facets.size(); i++) { + ctxt.facets().create(i, facets.get(i), dataverse); + } } - - for (int i = 0; i < facets.size(); i++) { - ctxt.facets().create(i, facets.get(i), dataverse); - } - } else if (resetRelationsOnNullValues) { - ctxt.facets().deleteFacetsFor(dataverse); - dataverse.setFacetRoot(false); } } + /* + inputLevels = null - ignore + inputLevels is empty - delete + inputLevels is not empty - set with new updated values + */ private void processInputLevels(CommandContext ctxt) { if (inputLevels != null) { - if (!inputLevels.isEmpty()) { + if (inputLevels.isEmpty()) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + } else { dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + inputLevels.forEach(inputLevel -> { + inputLevel.setDataverse(dataverse); + ctxt.fieldTypeInputLevels().create(inputLevel); + }); } - ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); - inputLevels.forEach(inputLevel -> { - inputLevel.setDataverse(dataverse); - ctxt.fieldTypeInputLevels().create(inputLevel); - }); - } else if (resetRelationsOnNullValues) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 3728f3ee6ce..145cfb6199c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List<DatasetFieldType> facets, List<DataverseFieldTypeInputLevel> inputLevels, List<MetadataBlock> metadataBlocks) { - super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks, false); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java index 8cffcd3d821..4a897adefa2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateRoleCommand.java @@ -22,12 +22,12 @@ @RequiredPermissions(Permission.ManageDataversePermissions) public class CreateRoleCommand extends AbstractCommand<DataverseRole> { - private final DataverseRole created; + private final DataverseRole role; private final Dataverse dv; public CreateRoleCommand(DataverseRole aRole, DataverseRequest aRequest,
Dataverse anAffectedDataverse) { super(aRequest, anAffectedDataverse); - created = aRole; + role = aRole; dv = anAffectedDataverse; } @@ -41,16 +41,16 @@ public DataverseRole execute(CommandContext ctxt) throws CommandException { //Test to see if the role already exists in DB try { DataverseRole testRole = ctxt.em().createNamedQuery("DataverseRole.findDataverseRoleByAlias", DataverseRole.class) - .setParameter("alias", created.getAlias()) + .setParameter("alias", role.getAlias()) .getSingleResult(); - if (!(testRole == null)) { + if (testRole != null && !testRole.getId().equals(role.getId())) { throw new IllegalCommandException(BundleUtil.getStringFromBundle("permission.role.not.created.alias.already.exists"), this); } } catch (NoResultException nre) { - // we want no results because that meand we can create a role + // we want no results because that means we can create a role } - dv.addRole(created); - return ctxt.roles().save(created); + dv.addRole(role); + return ctxt.roles().save(role); } } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 6dc4ab4d00d..55cc3708097 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -32,7 +32,7 @@ public UpdateDataverseCommand(Dataverse dataverse, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(dataverse, facets, featuredDataverses, request, inputLevels, null, null, false); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); } public UpdateDataverseCommand(Dataverse dataverse, @@ -41,9 +41,8 @@ public UpdateDataverseCommand(Dataverse dataverse, DataverseRequest request, List inputLevels, List metadataBlocks, - DataverseDTO updatedDataverseDTO, - boolean resetRelationsOnNullValues) { - super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks, resetRelationsOnNullValues); + DataverseDTO updatedDataverseDTO) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); if (featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java index ba6f5c3dec2..6a85219cc3c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/ClientHarvestRun.java @@ -6,7 +6,10 @@ package edu.harvard.iq.dataverse.harvest.client; import java.io.Serializable; +import java.util.Arrays; import java.util.Date; + +import edu.harvard.iq.dataverse.util.BundleUtil; import jakarta.persistence.Entity; import jakarta.persistence.GeneratedValue; import jakarta.persistence.GenerationType; @@ -40,13 +43,7 @@ public void setId(Long id) { this.id = id; } - public enum RunResultType { SUCCESS, FAILURE, INPROGRESS, INTERRUPTED }; - - private static String RESULT_LABEL_SUCCESS = "SUCCESS"; - private static String RESULT_LABEL_FAILURE = "FAILED"; - private static String RESULT_LABEL_INPROGRESS = "IN PROGRESS"; - private static String RESULT_DELETE_IN_PROGRESS = "DELETE IN PROGRESS"; - private static String RESULT_LABEL_INTERRUPTED = "INTERRUPTED"; + public enum RunResultType { COMPLETED, COMPLETED_WITH_FAILURES, FAILURE,
IN_PROGRESS, INTERRUPTED } @ManyToOne @JoinColumn(nullable = false) @@ -68,36 +65,43 @@ public RunResultType getResult() { public String getResultLabel() { if (harvestingClient != null && harvestingClient.isDeleteInProgress()) { - return RESULT_DELETE_IN_PROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.deleteInProgress"); } - - if (isSuccess()) { - return RESULT_LABEL_SUCCESS; + + if (isCompleted()) { + return BundleUtil.getStringFromBundle("harvestclients.result.completed"); + } else if (isCompletedWithFailures()) { + return BundleUtil.getStringFromBundle("harvestclients.result.completedWithFailures"); } else if (isFailed()) { - return RESULT_LABEL_FAILURE; + return BundleUtil.getStringFromBundle("harvestclients.result.failure"); } else if (isInProgress()) { - return RESULT_LABEL_INPROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.inProgress"); } else if (isInterrupted()) { - return RESULT_LABEL_INTERRUPTED; + return BundleUtil.getStringFromBundle("harvestclients.result.interrupted"); } return null; } public String getDetailedResultLabel() { if (harvestingClient != null && harvestingClient.isDeleteInProgress()) { - return RESULT_DELETE_IN_PROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.deleteInProgress"); } - if (isSuccess() || isInterrupted()) { + if (isCompleted() || isCompletedWithFailures() || isInterrupted()) { String resultLabel = getResultLabel(); - - resultLabel = resultLabel.concat("; "+harvestedDatasetCount+" harvested, "); - resultLabel = resultLabel.concat(deletedDatasetCount+" deleted, "); - resultLabel = resultLabel.concat(failedDatasetCount+" failed."); + + String details = BundleUtil.getStringFromBundle("harvestclients.result.details", Arrays.asList( + harvestedDatasetCount.toString(), + deletedDatasetCount.toString(), + failedDatasetCount.toString() + )); + if(details != null) { + resultLabel = resultLabel + "; " + details; + } return resultLabel; } else if (isFailed()) { - return RESULT_LABEL_FAILURE; + return BundleUtil.getStringFromBundle("harvestclients.result.failure"); } else if (isInProgress()) { - return RESULT_LABEL_INPROGRESS; + return BundleUtil.getStringFromBundle("harvestclients.result.inProgress"); } return null; } @@ -106,12 +110,20 @@ public void setResult(RunResultType harvestResult) { this.harvestResult = harvestResult; } - public boolean isSuccess() { - return RunResultType.SUCCESS == harvestResult; + public boolean isCompleted() { + return RunResultType.COMPLETED == harvestResult; + } + + public void setCompleted() { + harvestResult = RunResultType.COMPLETED; + } + + public boolean isCompletedWithFailures() { + return RunResultType.COMPLETED_WITH_FAILURES == harvestResult; } - public void setSuccess() { - harvestResult = RunResultType.SUCCESS; + public void setCompletedWithFailures() { + harvestResult = RunResultType.COMPLETED_WITH_FAILURES; } public boolean isFailed() { @@ -123,12 +135,12 @@ public void setFailed() { } public boolean isInProgress() { - return RunResultType.INPROGRESS == harvestResult || + return RunResultType.IN_PROGRESS == harvestResult || (harvestResult == null && startTime != null && finishTime == null); } public void setInProgress() { - harvestResult = RunResultType.INPROGRESS; + harvestResult = RunResultType.IN_PROGRESS; } public boolean isInterrupted() { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java index e0b5c2dfbfb..16580f8f9f1
100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvesterServiceBean.java @@ -163,7 +163,7 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId try { if (harvestingClientConfig.isHarvestingNow()) { - hdLogger.log(Level.SEVERE, "Cannot start harvest, client " + harvestingClientConfig.getName() + " is already harvesting."); + hdLogger.log(Level.SEVERE, String.format("Cannot start harvest, client %s is already harvesting.", harvestingClientConfig.getName())); } else { harvestingClientService.resetHarvestInProgress(harvestingClientId); @@ -176,9 +176,16 @@ public void doHarvest(DataverseRequest dataverseRequest, Long harvestingClientId } else { throw new IOException("Unsupported harvest type"); } - harvestingClientService.setHarvestSuccess(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); - hdLogger.log(Level.INFO, "COMPLETED HARVEST, server=" + harvestingClientConfig.getArchiveUrl() + ", metadataPrefix=" + harvestingClientConfig.getMetadataPrefix()); - hdLogger.log(Level.INFO, "Datasets created/updated: " + harvestedDatasetIds.size() + ", datasets deleted: " + deletedIdentifiers.size() + ", datasets failed: " + failedIdentifiers.size()); + + if (failedIdentifiers.isEmpty()) { + harvestingClientService.setHarvestCompleted(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); + hdLogger.log(Level.INFO, String.format("COMPLETED HARVEST, server=%s, metadataPrefix=%s", harvestingClientConfig.getArchiveUrl(), harvestingClientConfig.getMetadataPrefix())); + } else { + harvestingClientService.setHarvestCompletedWithFailures(harvestingClientId, new Date(), harvestedDatasetIds.size(), failedIdentifiers.size(), deletedIdentifiers.size()); + hdLogger.log(Level.INFO, String.format("COMPLETED HARVEST WITH FAILURES, server=%s, metadataPrefix=%s", harvestingClientConfig.getArchiveUrl(), harvestingClientConfig.getMetadataPrefix())); + } + + hdLogger.log(Level.INFO, String.format("Datasets created/updated: %s, datasets deleted: %s, datasets failed: %s", harvestedDatasetIds.size(), deletedIdentifiers.size(), failedIdentifiers.size())); } } catch (StopHarvestException she) { diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java index 7280b6af129..e73310650b4 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClient.java @@ -297,7 +297,7 @@ public ClientHarvestRun getLastSuccessfulRun() { int i = harvestHistory.size() - 1; while (i > -1) { - if (harvestHistory.get(i).isSuccess()) { + if (harvestHistory.get(i).isCompleted() || harvestHistory.get(i).isCompletedWithFailures()) { return harvestHistory.get(i); } i--; @@ -314,7 +314,7 @@ ClientHarvestRun getLastNonEmptyRun() { int i = harvestHistory.size() - 1; while (i > -1) { - if (harvestHistory.get(i).isSuccess()) { + if (harvestHistory.get(i).isCompleted() || harvestHistory.get(i).isCompletedWithFailures()) { if (harvestHistory.get(i).getHarvestedDatasetCount().longValue() > 0 || harvestHistory.get(i).getDeletedDatasetCount().longValue() > 0) { return harvestHistory.get(i); diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java
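Since every RunResultType now maps one-to-one to a bundle key, the if/else chains in getResultLabel() could also be table-driven. A hypothetical alternative (a field inside ClientHarvestRun, not part of this changeset; the DELETE IN PROGRESS label would still need its harvestingClient check first):

```java
// Hypothetical lookup table; requires java.util.Map.
private static final Map<RunResultType, String> LABEL_KEYS = Map.of(
        RunResultType.COMPLETED, "harvestclients.result.completed",
        RunResultType.COMPLETED_WITH_FAILURES, "harvestclients.result.completedWithFailures",
        RunResultType.FAILURE, "harvestclients.result.failure",
        RunResultType.IN_PROGRESS, "harvestclients.result.inProgress",
        RunResultType.INTERRUPTED, "harvestclients.result.interrupted");
```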
b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java index 7ec6d75a41c..2f76fed1a11 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/HarvestingClientServiceBean.java @@ -164,8 +164,13 @@ public void deleteClient(Long clientId) { } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) - public void setHarvestSuccess(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { - recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.SUCCESS); + public void setHarvestCompleted(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.COMPLETED); + } + + @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) + public void setHarvestCompletedWithFailures(Long hcId, Date currentTime, int harvestedCount, int failedCount, int deletedCount) { + recordHarvestJobStatus(hcId, currentTime, harvestedCount, failedCount, deletedCount, ClientHarvestRun.RunResultType.COMPLETED_WITH_FAILURES); } @TransactionAttribute(TransactionAttributeType.REQUIRES_NEW) diff --git a/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java b/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java index bb3dc06972c..4345fb44d8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java +++ b/src/main/java/edu/harvard/iq/dataverse/harvest/client/oai/OaiHandler.java @@ -4,6 +4,7 @@ import io.gdcc.xoai.model.oaipmh.results.record.Header; import io.gdcc.xoai.model.oaipmh.results.MetadataFormat; import io.gdcc.xoai.model.oaipmh.results.Set; +import io.gdcc.xoai.model.oaipmh.verbs.Identify; import io.gdcc.xoai.serviceprovider.ServiceProvider; import io.gdcc.xoai.serviceprovider.exceptions.BadArgumentException; import io.gdcc.xoai.serviceprovider.exceptions.InvalidOAIResponse; @@ -289,6 +290,8 @@ private ListIdentifiersParameters buildListIdentifiersParams() throws OaiHandler mip.withMetadataPrefix(metadataPrefix); if (this.fromDate != null) { + Identify identify = runIdentify(); + mip.withGranularity(identify.getGranularity().toString()); mip.withFrom(this.fromDate.toInstant()); } @@ -311,10 +314,13 @@ public String getProprietaryDataverseMetadataURL(String identifier) { return requestURL.toString(); } - public void runIdentify() { - // not implemented yet - // (we will need it, both for validating the remote server, - // and to learn about its extended capabilities) + public Identify runIdentify() throws OaiHandlerException { + ServiceProvider sp = getServiceProvider(); + try { + return sp.identify(); + } catch (InvalidOAIResponse ior) { + throw new OaiHandlerException("No valid response received from the OAI server."); + } } public Map makeCustomHeaders(String headersString) { diff --git a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java index 5626a442762..091fbde484e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java +++ b/src/main/java/edu/harvard/iq/dataverse/mydata/MyDataFinder.java @@ -439,19 +439,6 @@ private boolean runStep1RoleAssignments() { if (results == null) { this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.null")); return false; - } 
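On the OaiHandler change above: OAI-PMH repositories advertise their timestamp granularity (day vs. second precision) in the Identify response, and a from value finer than the repository supports is a protocol error, which is presumably why buildListIdentifiersParams() now performs an Identify round-trip before setting the date range. Restated as a fragment, using only the calls visible in this hunk:

```java
// Inside buildListIdentifiersParams(), for an incremental harvest with a start date:
if (this.fromDate != null) {
    Identify identify = runIdentify(); // real request now; throws OaiHandlerException on a bad response
    mip.withGranularity(identify.getGranularity().toString());
    mip.withFrom(this.fromDate.toInstant());
}
```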
else if (results.isEmpty()) { - List roleNames = this.rolePermissionHelper.getRoleNamesByIdList(this.filterParams.getRoleIds()); - if ((roleNames == null) || (roleNames.isEmpty())) { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.role")); - } else { - final List args = Arrays.asList(StringUtils.join(roleNames, ", ")); - if (roleNames.size() == 1) { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.role.empty", args)); - } else { - this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.roles.empty", args)); - } - } - return false; } // Iterate through assigned objects, a single object may end up in @@ -485,6 +472,21 @@ private boolean runStep1RoleAssignments() { } directDvObjectIds.add(dvId); } + + if (directDvObjectIds.isEmpty()) { + List roleNames = this.rolePermissionHelper.getRoleNamesByIdList(this.filterParams.getRoleIds()); + if ((roleNames == null) || (roleNames.isEmpty())) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.no.role")); + } else { + final List args = Arrays.asList(StringUtils.join(roleNames, ", ")); + if (roleNames.size() == 1) { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.role.empty", args)); + } else { + this.addErrorMessage(BundleUtil.getStringFromBundle("myDataFinder.error.result.roles.empty", args)); + } + } + return false; + } return true; } diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java index b01fb5e7eba..c4d6aa4ea21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidProviderFactoryBean.java @@ -205,7 +205,7 @@ private void loadProviders() { passphrase); break; case "perma": - String baseUrl = JvmSettings.LEGACY_PERMALINK_BASEURL.lookup(); + String baseUrl = JvmSettings.LEGACY_PERMALINK_BASEURL.lookupOptional().orElse(SystemConfig.getDataverseSiteUrlStatic()); legacy = new PermaLinkPidProvider("legacy", "legacy", authority, shoulder, identifierGenerationStyle, dataFilePidFormat, "", "", baseUrl, PermaLinkPidProvider.SEPARATOR); diff --git a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java index 279f18dcd0e..003b4e3f61c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java +++ b/src/main/java/edu/harvard/iq/dataverse/pidproviders/PidUtil.java @@ -3,6 +3,7 @@ import edu.harvard.iq.dataverse.GlobalId; import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider; import edu.harvard.iq.dataverse.pidproviders.handle.HandlePidProvider; +import edu.harvard.iq.dataverse.pidproviders.perma.PermaLinkPidProvider; import edu.harvard.iq.dataverse.util.BundleUtil; import java.io.IOException; import java.io.InputStream; @@ -252,7 +253,12 @@ public static void clearPidProviders() { * Get a PidProvider by protocol/authority/shoulder. 
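The PidUtil change just below dispatches on protocol because permalink providers join authority and shoulder with PermaLinkPidProvider.SEPARATOR rather than the default AbstractPidProvider.SEPARATOR used by DOIs and handles, so the three-argument lookup previously assembled the wrong key for perma PIDs. A usage sketch with illustrative authority and shoulder values:

```java
// Illustrative values; the point is that the separator now depends on the protocol.
PidProvider doiProvider = PidUtil.getPidProvider("doi", "10.5072", "FK2");   // default separator
PidProvider permaProvider = PidUtil.getPidProvider("perma", "DVN1", "FK2");  // permalink separator
```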
*/ public static PidProvider getPidProvider(String protocol, String authority, String shoulder) { - return getPidProvider(protocol, authority, shoulder, AbstractPidProvider.SEPARATOR); + switch(protocol) { + case PermaLinkPidProvider.PERMA_PROTOCOL: + return getPidProvider(protocol, authority, shoulder, PermaLinkPidProvider.SEPARATOR); + default: + return getPidProvider(protocol, authority, shoulder, AbstractPidProvider.SEPARATOR); + } } public static PidProvider getPidProvider(String protocol, String authority, String shoulder, String separator) { diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java new file mode 100644 index 00000000000..1ae236d348f --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/search/AbstractSolrClientService.java @@ -0,0 +1,51 @@ +package edu.harvard.iq.dataverse.search; + +import java.io.IOException; +import java.util.logging.Logger; + +import org.apache.solr.client.solrj.SolrClient; + +import edu.harvard.iq.dataverse.settings.JvmSettings; +import edu.harvard.iq.dataverse.util.SystemConfig; +import jakarta.ejb.EJB; + +/** + * Generic methods shared by Solr client implementations + * + * @author jeromeroucou + */ +public abstract class AbstractSolrClientService { + private static final Logger logger = Logger.getLogger(AbstractSolrClientService.class.getCanonicalName()); + + @EJB + SystemConfig systemConfig; + + public abstract void init(); + public abstract void close(); + public abstract SolrClient getSolrClient(); + public abstract void setSolrClient(SolrClient solrClient); + + public void close(SolrClient solrClient) { + if (solrClient != null) { + try { + solrClient.close(); + } catch (IOException e) { + logger.warning("Solr closing error: " + e); + } + } + } + + public void reInitialize() { + close(); + init(); + } + + public String getSolrUrl() { + // Get from MPCONFIG. Might be configured by a sysadmin or simply return the + // default shipped with resources/META-INF/microprofile-config.properties.
+ final String protocol = JvmSettings.SOLR_PROT.lookup(); + final String path = JvmSettings.SOLR_PATH.lookup(); + return protocol + "://" + this.systemConfig.getSolrHostColonPort() + path; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java index 3f60a9bd1a2..839dd4a7e08 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java @@ -1,8 +1,34 @@ package edu.harvard.iq.dataverse.search; -import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.ControlledVocabularyValue; +import edu.harvard.iq.dataverse.DataFile; +import edu.harvard.iq.dataverse.DataFileServiceBean; +import edu.harvard.iq.dataverse.DataFileTag; +import edu.harvard.iq.dataverse.Dataset; +import edu.harvard.iq.dataverse.DatasetField; +import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; +import edu.harvard.iq.dataverse.DatasetFieldConstant; +import edu.harvard.iq.dataverse.DatasetFieldServiceBean; +import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.DatasetFieldValue; +import edu.harvard.iq.dataverse.DatasetFieldValueValidator; +import edu.harvard.iq.dataverse.DatasetLinkingServiceBean; +import edu.harvard.iq.dataverse.DatasetServiceBean; +import edu.harvard.iq.dataverse.DatasetVersion; import edu.harvard.iq.dataverse.DatasetVersion.VersionState; +import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean; +import edu.harvard.iq.dataverse.DatasetVersionServiceBean; +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseLinkingServiceBean; +import edu.harvard.iq.dataverse.DataverseServiceBean; +import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.DvObject.DType; +import edu.harvard.iq.dataverse.DvObjectServiceBean; +import edu.harvard.iq.dataverse.Embargo; +import edu.harvard.iq.dataverse.FileMetadata; +import edu.harvard.iq.dataverse.GlobalId; +import edu.harvard.iq.dataverse.PermissionServiceBean; +import edu.harvard.iq.dataverse.Retention; import edu.harvard.iq.dataverse.authorization.AuthenticationServiceBean; import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUserServiceBean; import edu.harvard.iq.dataverse.batch.util.LoggingUtil; @@ -48,8 +74,6 @@ import java.util.logging.Logger; import java.util.regex.Pattern; import java.util.stream.Collectors; -import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; import jakarta.ejb.AsyncResult; import jakarta.ejb.Asynchronous; import jakarta.ejb.EJB; @@ -66,11 +90,9 @@ import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.StringUtils; -import org.apache.solr.client.solrj.SolrClient; import org.apache.solr.client.solrj.SolrQuery; import org.apache.solr.client.solrj.SolrQuery.SortClause; import org.apache.solr.client.solrj.SolrServerException; -import org.apache.solr.client.solrj.impl.HttpSolrClient; import org.apache.solr.client.solrj.response.QueryResponse; import org.apache.solr.client.solrj.response.UpdateResponse; import org.apache.solr.common.SolrDocument; @@ -125,16 +147,15 @@ public class IndexServiceBean { @EJB SettingsServiceBean settingsService; @EJB - SolrClientService solrClientService; + SolrClientService solrClientService; // only for query index on Solr + @EJB + SolrClientIndexService solrClientIndexService; // only for add, update, or remove index on Solr @EJB DataFileServiceBean dataFileService; @EJB 
VariableServiceBean variableService; - - @EJB - IndexBatchServiceBean indexBatchService; - + @EJB DatasetFieldServiceBean datasetFieldService; @@ -157,37 +178,10 @@ public class IndexServiceBean { private static final String IN_REVIEW_STRING = "In Review"; private static final String DEACCESSIONED_STRING = "Deaccessioned"; public static final String HARVESTED = "Harvested"; - private String rootDataverseName; private Dataverse rootDataverseCached; - SolrClient solrServer; private VariableMetadataUtil variableMetadataUtil; - @PostConstruct - public void init() { - // Get from MPCONFIG. Might be configured by a sysadmin or simply return the default shipped with - // resources/META-INF/microprofile-config.properties. - String protocol = JvmSettings.SOLR_PROT.lookup(); - String path = JvmSettings.SOLR_PATH.lookup(); - - String urlString = protocol + "://" + systemConfig.getSolrHostColonPort() + path; - solrServer = new HttpSolrClient.Builder(urlString).build(); - - rootDataverseName = findRootDataverseCached().getName(); - } - - @PreDestroy - public void close() { - if (solrServer != null) { - try { - solrServer.close(); - } catch (IOException e) { - logger.warning("Solr closing error: " + e); - } - solrServer = null; - } - } - @TransactionAttribute(REQUIRES_NEW) public Future indexDataverseInNewTransaction(Dataverse dataverse) throws SolrServerException, IOException{ return indexDataverse(dataverse, false); @@ -326,7 +320,7 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths) String status; try { if (dataverse.getId() != null) { - solrClientService.getSolrClient().add(docs); + solrClientIndexService.getSolrClient().add(docs); } else { logger.info("WARNING: indexing of a dataverse with no id attempted"); } @@ -1745,7 +1739,7 @@ private String addOrUpdateDataset(IndexableDataset indexableDataset, Set d final SolrInputDocuments docs = toSolrDocs(indexableDataset, datafilesInDraftVersion); try { - solrClientService.getSolrClient().add(docs.getDocuments()); + solrClientIndexService.getSolrClient().add(docs.getDocuments()); } catch (SolrServerException | IOException ex) { if (ex.getCause() instanceof SolrServerException) { throw new SolrServerException(ex); @@ -2007,7 +2001,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); - UpdateResponse addResponse = solrClientService.getSolrClient().add(sid); + UpdateResponse addResponse = solrClientIndexService.getSolrClient().add(sid); if (object.isInstanceofDataset()) { for (DataFile df : dataset.getFiles()) { solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString())); @@ -2020,7 +2014,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc } sid.removeField(SearchFields.SUBTREE); sid.addField(SearchFields.SUBTREE, paths); - addResponse = solrClientService.getSolrClient().add(sid); + addResponse = solrClientIndexService.getSolrClient().add(sid); } } } @@ -2062,7 +2056,7 @@ public String delete(Dataverse doomed) { logger.fine("deleting Solr document for dataverse " + doomed.getId()); UpdateResponse updateResponse; try { - updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId()); + updateResponse = solrClientIndexService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId()); } catch (SolrServerException | IOException ex) { return ex.toString(); } @@ -2082,7 +2076,7 @@ public String 
removeSolrDocFromIndex(String doomed) { logger.fine("deleting Solr document: " + doomed); UpdateResponse updateResponse; try { - updateResponse = solrClientService.getSolrClient().deleteById(doomed); + updateResponse = solrClientIndexService.getSolrClient().deleteById(doomed); } catch (SolrServerException | IOException ex) { return ex.toString(); } @@ -2285,7 +2279,7 @@ public List findPermissionsInSolrOnly() throws SearchException { boolean done = false; while (!done) { q.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark); - QueryResponse rsp = solrServer.query(q); + QueryResponse rsp = solrClientService.getSolrClient().query(q); String nextCursorMark = rsp.getNextCursorMark(); logger.fine("Next cursor mark (1K entries): " + nextCursorMark); SolrDocumentList list = rsp.getResults(); @@ -2367,7 +2361,7 @@ private List findDvObjectInSolrOnly(String type) throws SearchException solrQuery.set(CursorMarkParams.CURSOR_MARK_PARAM, cursorMark); QueryResponse rsp = null; try { - rsp = solrServer.query(solrQuery); + rsp = solrClientService.getSolrClient().query(solrQuery); } catch (SolrServerException | IOException ex) { throw new SearchException("Error searching Solr type: " + type, ex); diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java index 493dbfcecc4..d50fbee681c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java @@ -335,9 +335,13 @@ public SolrQueryResponse search( // ----------------------------------- // PERMISSION FILTER QUERY // ----------------------------------- - String permissionFilterQuery = this.getPermissionFilterQuery(dataverseRequest, solrQuery, onlyDatatRelatedToMe, addFacets); - if (!StringUtils.isBlank(permissionFilterQuery)) { - solrQuery.addFilterQuery(permissionFilterQuery); + String permissionFilterQuery = getPermissionFilterQuery(dataverseRequest, solrQuery, onlyDatatRelatedToMe, addFacets); + if (!permissionFilterQuery.isEmpty()) { + String[] filterParts = permissionFilterQuery.split("&q1="); + solrQuery.addFilterQuery(filterParts[0]); + if(filterParts.length > 1 ) { + solrQuery.add("q1", filterParts[1]); + } } /** @@ -1099,9 +1103,9 @@ private String buildPermissionFilterQuery(boolean avoidJoin, String permissionFi String query = (avoidJoin&& !isAllGroups(permissionFilterGroups)) ? 
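The two SearchServiceBean changes in this file work as a pair: buildPermissionFilterQuery() now references the join value through a Solr local-parameter (v=$q1) and appends the value after a literal "&q1=", and search() splits on that marker to send the two parts as separate request parameters. A common motivation for this kind of parameter dereferencing is keeping a long, special-character-laden group clause out of the fq expression itself. A sketch of what the split produces, with an illustrative group value:

```java
// Assembled request (group names illustrative):
SolrQuery solrQuery = new SolrQuery("*:*");
solrQuery.addFilterQuery("{!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}");
solrQuery.add("q1", SearchFields.DISCOVERABLE_BY + ":(group_1 OR group_42)");
```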
SearchFields.PUBLIC_OBJECT + ":" + true : ""; if (permissionFilterGroups != null && !isAllGroups(permissionFilterGroups)) { if (!query.isEmpty()) { - query = "(" + query + " OR " + "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups + ")"; + query = "(" + query + " OR " + "{!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1})&q1=" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; } else { - query = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id}" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; + query = "{!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}&q1=" + SearchFields.DISCOVERABLE_BY + ":" + permissionFilterGroups; } } return query; diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java new file mode 100644 index 00000000000..0b7f1aae798 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientIndexService.java @@ -0,0 +1,49 @@ +package edu.harvard.iq.dataverse.search; + +import java.util.logging.Logger; + +import org.apache.solr.client.solrj.SolrClient; +import org.apache.solr.client.solrj.impl.ConcurrentUpdateHttp2SolrClient; +import org.apache.solr.client.solrj.impl.Http2SolrClient; + +import jakarta.annotation.PostConstruct; +import jakarta.annotation.PreDestroy; +import jakarta.ejb.Singleton; +import jakarta.inject.Named; + +/** + * Solr client to provide insert/update/delete operations. + * Don't use this service with queries to Solr, use {@link SolrClientService} instead. + */ +@Named +@Singleton +public class SolrClientIndexService extends AbstractSolrClientService { + + private static final Logger logger = Logger.getLogger(SolrClientIndexService.class.getCanonicalName()); + + private SolrClient solrClient; + + @PostConstruct + public void init() { + solrClient = new ConcurrentUpdateHttp2SolrClient.Builder( + getSolrUrl(), new Http2SolrClient.Builder().build()).build(); + } + + @PreDestroy + public void close() { + close(solrClient); + } + + public SolrClient getSolrClient() { + // Should never happen - but? + if (solrClient == null) { + init(); + } + return solrClient; + } + + public void setSolrClient(SolrClient solrClient) { + this.solrClient = solrClient; + } + +} diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java index b36130de7c8..f9d94b8c6d3 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrClientService.java @@ -1,65 +1,39 @@ -/* - * To change this license header, choose License Headers in Project Properties. - * To change this template file, choose Tools | Templates - * and open the template in the editor. 
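The new SolrClientIndexService above, together with the reworked SolrClientService below, splits Solr traffic by purpose: queries go through a plain Http2SolrClient, while adds, updates, and deletes go through ConcurrentUpdateHttp2SolrClient, which queues documents and flushes them from background threads so indexing calls return quickly. Side by side (URL illustrative):

```java
// Both builders are the ones used by the two services in this changeset.
String url = "http://localhost:8983/solr/collection1";
SolrClient queryClient = new Http2SolrClient.Builder(url).build();
SolrClient indexClient = new ConcurrentUpdateHttp2SolrClient.Builder(
        url, new Http2SolrClient.Builder().build()).build();
```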
- */ package edu.harvard.iq.dataverse.search; -import edu.harvard.iq.dataverse.settings.JvmSettings; -import edu.harvard.iq.dataverse.util.SystemConfig; import org.apache.solr.client.solrj.SolrClient; -import org.apache.solr.client.solrj.impl.HttpSolrClient; +import org.apache.solr.client.solrj.impl.Http2SolrClient; import jakarta.annotation.PostConstruct; import jakarta.annotation.PreDestroy; -import jakarta.ejb.EJB; import jakarta.ejb.Singleton; import jakarta.inject.Named; -import java.io.IOException; import java.util.logging.Logger; /** * * @author landreev * - * This singleton is dedicated to initializing the HttpSolrClient used by the - * application to talk to the search engine, and serving it to all the other - * classes that need it. + * This singleton is dedicated to initializing the Http2SolrClient, used by + * the application to talk to the search engine, and serving it to all the + * other classes that need it. * This ensures that we are using one client only - as recommended by the * documentation. */ @Named @Singleton -public class SolrClientService { +public class SolrClientService extends AbstractSolrClientService { private static final Logger logger = Logger.getLogger(SolrClientService.class.getCanonicalName()); - @EJB - SystemConfig systemConfig; - private SolrClient solrClient; @PostConstruct public void init() { - // Get from MPCONFIG. Might be configured by a sysadmin or simply return the default shipped with - // resources/META-INF/microprofile-config.properties. - String protocol = JvmSettings.SOLR_PROT.lookup(); - String path = JvmSettings.SOLR_PATH.lookup(); - - String urlString = protocol + "://" + systemConfig.getSolrHostColonPort() + path; - solrClient = new HttpSolrClient.Builder(urlString).build(); + solrClient = new Http2SolrClient.Builder(getSolrUrl()).build(); } @PreDestroy public void close() { - if (solrClient != null) { - try { - solrClient.close(); - } catch (IOException e) { - logger.warning("Solr closing error: " + e); - } - - solrClient = null; - } + close(solrClient); } public SolrClient getSolrClient() { @@ -73,9 +47,4 @@ public SolrClient getSolrClient() { public void setSolrClient(SolrClient solrClient) { this.solrClient = solrClient; } - - public void reInitialize() { - close(); - init(); - } } diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java index e4d885276d0..2b4f08807ef 100644 --- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java @@ -46,9 +46,7 @@ public class SolrIndexServiceBean { @EJB DataverseRoleServiceBean rolesSvc; @EJB - IndexServiceBean indexService; - @EJB - SolrClientService solrClientService; + SolrClientIndexService solrClientService; public static String numRowsClearedByClearAllIndexTimes = "numRowsClearedByClearAllIndexTimes"; public static String messageString = "message"; @@ -155,7 +153,15 @@ private List constructDatafileSolrDocs(DataFile dataFile, Map desiredCards = searchPermissionsService.getDesiredCards(dataFile.getOwner()); for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataFile.getOwner())) { boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()); - if (cardShouldExist) { + /* + * Since datasetVersionFileIsAttachedTo should be a draft or the most recent + * released one, it could be more efficient to stop the search through + * 
FileMetadatas after those two (versus continuing through all prior versions + * as in isInDatasetVersion). Alternately, perhaps filesToReIndexPermissionsFor + * should not combine the list of files for the different dataset versions into a + * single list to start with. + */ + if (cardShouldExist && dataFile.isInDatasetVersion(datasetVersionFileIsAttachedTo)) { String solrIdStart = IndexServiceBean.solrDocIdentifierFile + dataFile.getId(); String solrIdEnd = getDatasetOrDataFileSolrEnding(datasetVersionFileIsAttachedTo.getVersionState()); String solrId = solrIdStart + solrIdEnd; @@ -375,6 +381,12 @@ public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) { * inheritance */ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) { + + if (definitionPoint == null) { + logger.log(Level.WARNING, "Cannot perform indexPermissionsOnSelfAndChildren with a null definitionPoint"); + return null; + } + List filesToReindexAsBatch = new ArrayList<>(); /** * @todo Re-indexing the definition point itself seems to be necessary diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java index 2242b0f51c6..04ae0018323 100644 --- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java +++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java @@ -90,7 +90,6 @@ public enum FeatureFlags { * @since Dataverse 6.3 */ INDEX_HARVESTED_METADATA_SOURCE("index-harvested-metadata-source"), - /** * Dataverse normally deletes all solr documents related to a dataset's files * when the dataset is reindexed. With this flag enabled, additional logic is diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java index 8ba3ee177e8..b291eedc751 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonPrinter.java @@ -423,11 +423,17 @@ public static JsonObjectBuilder json(FileDetailsHolder ds) { } public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles) { - return json(dsv, null, includeFiles, false); + return json(dsv, null, includeFiles, false, true); + } + public static JsonObjectBuilder json(DatasetVersion dsv, boolean includeFiles, boolean includeMetadataBlocks) { + return json(dsv, null, includeFiles, false, includeMetadataBlocks); + } + public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, + boolean includeFiles, boolean returnOwners) { + return json(dsv, anonymizedFieldTypeNamesList, includeFiles, returnOwners, true); } - public static JsonObjectBuilder json(DatasetVersion dsv, List anonymizedFieldTypeNamesList, - boolean includeFiles, boolean returnOwners) { + boolean includeFiles, boolean returnOwners, boolean includeMetadataBlocks) { Dataset dataset = dsv.getDataset(); JsonObjectBuilder bld = jsonObjectBuilder() .add("id", dsv.getId()).add("datasetId", dataset.getId()) @@ -472,11 +478,12 @@ public static JsonObjectBuilder json(DatasetVersion dsv, List anonymized .add("sizeOfCollection", dsv.getTermsOfUseAndAccess().getSizeOfCollection()) .add("studyCompletion", dsv.getTermsOfUseAndAccess().getStudyCompletion()) .add("fileAccessRequest", dsv.getTermsOfUseAndAccess().isFileAccessRequest()); - - bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ?
- jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) - : jsonByBlocks(dsv.getDatasetFields()) - ); + if (includeMetadataBlocks) { + bld.add("metadataBlocks", (anonymizedFieldTypeNamesList != null) ? + jsonByBlocks(dsv.getDatasetFields(), anonymizedFieldTypeNamesList) + : jsonByBlocks(dsv.getDatasetFields()) + ); + } if(returnOwners){ bld.add("isPartOf", getOwnersFromDvObject(dataset)); } @@ -643,6 +650,19 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO .add("displayName", metadataBlock.getDisplayName()) .add("displayOnCreate", metadataBlock.isDisplayOnCreate()); + List datasetFieldTypesList; + + if (ownerDataverse != null) { + datasetFieldTypesList = datasetFieldService.findAllInMetadataBlockAndDataverse( + metadataBlock, ownerDataverse, printOnlyDisplayedOnCreateDatasetFieldTypes); + } else { + datasetFieldTypesList = printOnlyDisplayedOnCreateDatasetFieldTypes + ? datasetFieldService.findAllDisplayedOnCreateInMetadataBlock(metadataBlock) + : metadataBlock.getDatasetFieldTypes(); + } + + Set datasetFieldTypes = filterOutDuplicateDatasetFieldTypes(datasetFieldTypesList); + JsonObjectBuilder fieldsBuilder = Json.createObjectBuilder(); Predicate isNoChild = element -> element.isChild() == false; @@ -672,6 +692,17 @@ public static JsonObjectBuilder json(MetadataBlock metadataBlock, boolean printO return jsonObjectBuilder; } + // This will remove datasetFieldTypes that are in the list but also a child of another datasetFieldType in the list + // Prevents duplicate datasetFieldType information from being returned twice + // See: https://github.com/IQSS/dataverse/issues/10472 + private static Set filterOutDuplicateDatasetFieldTypes(List datasetFieldTypesList) { + // making a copy of the list as to not damage the original when we remove items + List datasetFieldTypes = new ArrayList<>(datasetFieldTypesList); + // exclude/remove datasetFieldTypes if datasetFieldType exists as a child of another datasetFieldType + datasetFieldTypesList.forEach(dsft -> dsft.getChildDatasetFieldTypes().forEach(c -> datasetFieldTypes.remove(c))); + return new TreeSet<>(datasetFieldTypes); + } + public static JsonArrayBuilder jsonDatasetFieldTypes(List fields) { JsonArrayBuilder fieldsJson = Json.createArrayBuilder(); for (DatasetFieldType field : fields) { diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties index a61d841adda..85602dd43a1 100644 --- a/src/main/java/propertyFiles/Bundle.properties +++ b/src/main/java/propertyFiles/Bundle.properties @@ -636,6 +636,13 @@ harvestclients.viewEditDialog.archiveDescription.tip=Description of the archival harvestclients.viewEditDialog.archiveDescription.default.generic=This Dataset is harvested from our partners. Clicking the link will take you directly to the archival source of the data. harvestclients.viewEditDialog.btn.save=Save Changes harvestclients.newClientDialog.title.edit=Edit Group {0} +harvestclients.result.completed=Completed +harvestclients.result.completedWithFailures=Completed with failures +harvestclients.result.failure=FAILED +harvestclients.result.inProgress=IN PROGRESS +harvestclients.result.deleteInProgress=DELETE IN PROGRESS +harvestclients.result.interrupted=INTERRUPTED +harvestclients.result.details={0} harvested, {1} deleted, {2} failed.
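The last entry above is a parameterized message; how it is consumed (this mirrors getDetailedResultLabel() in ClientHarvestRun, with illustrative counts):

```java
String details = BundleUtil.getStringFromBundle("harvestclients.result.details",
        Arrays.asList("12", "3", "1"));
// details -> "12 harvested, 3 deleted, 1 failed."
```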
#harvestset.xhtml harvestserver.title=Manage Harvesting Server @@ -977,6 +984,7 @@ dataverse.inputlevels.error.cannotberequiredifnotincluded=The input level for th dataverse.facets.error.fieldtypenotfound=Can't find dataset field type '{0}' dataverse.facets.error.fieldtypenotfacetable=Dataset field type '{0}' is not facetable dataverse.metadatablocks.error.invalidmetadatablockname=Invalid metadata block name: {0} +dataverse.metadatablocks.error.containslistandinheritflag=Metadata block cannot contain both {0} and {1}: true dataverse.create.error.jsonparse=Error parsing Json: {0} dataverse.create.error.jsonparsetodataverse=Error parsing the POSTed json into a dataverse: {0} # rolesAndPermissionsFragment.xhtml diff --git a/src/main/resources/db/migration/V6.5.0.2.sql b/src/main/resources/db/migration/V6.5.0.2.sql new file mode 100644 index 00000000000..804ce3c1ea8 --- /dev/null +++ b/src/main/resources/db/migration/V6.5.0.2.sql @@ -0,0 +1,10 @@ +-- Fixes File Access Requests when upgrading from Dataverse 6.0 +-- See: https://github.com/IQSS/dataverse/issues/10714 +DELETE FROM fileaccessrequests +WHERE creation_time <> (SELECT MIN(creation_time) + FROM fileaccessrequests far2 + WHERE far2.datafile_id = fileaccessrequests.datafile_id + AND far2.authenticated_user_id = fileaccessrequests.authenticated_user_id + AND far2.request_state is NULL); + +UPDATE fileaccessrequests SET request_state='CREATED' WHERE request_state is NULL; diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java index 3cd03abeb38..d5c80cde1aa 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataRetrieverApiIT.java @@ -3,8 +3,10 @@ import io.restassured.RestAssured; import io.restassured.response.Response; import edu.harvard.iq.dataverse.api.auth.ApiKeyAuthMechanism; +import edu.harvard.iq.dataverse.authorization.DataverseRole; import edu.harvard.iq.dataverse.util.BundleUtil; +import io.restassured.path.json.JsonPath; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; @@ -44,12 +46,62 @@ public void testRetrieveMyDataAsJsonString() { assertEquals(prettyPrintError("dataretrieverAPI.user.not.found", Arrays.asList(badUserIdentifier)), invalidUserIdentifierResponse.prettyPrint()); assertEquals(OK.getStatusCode(), invalidUserIdentifierResponse.getStatusCode()); - // Call as superuser with valid user identifier + // Call as superuser with valid user identifier and no roles Response createSecondUserResponse = UtilIT.createRandomUser(); String userIdentifier = UtilIT.getUsernameFromResponse(createSecondUserResponse); Response validUserIdentifierResponse = UtilIT.retrieveMyDataAsJsonString(superUserApiToken, userIdentifier, emptyRoleIdsList); assertEquals(prettyPrintError("myDataFinder.error.result.no.role", null), validUserIdentifierResponse.prettyPrint()); assertEquals(OK.getStatusCode(), validUserIdentifierResponse.getStatusCode()); + + // Call as normal user with one valid role and no results + Response createNormalUserResponse = UtilIT.createRandomUser(); + String normalUserUsername = UtilIT.getUsernameFromResponse(createNormalUserResponse); + String normalUserApiToken = UtilIT.getApiTokenFromResponse(createNormalUserResponse); + Response noResultwithOneRoleResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L))); + assertEquals(prettyPrintError("myDataFinder.error.result.role.empty",
Arrays.asList("Dataset Creator")), noResultwithOneRoleResponse.prettyPrint()); + assertEquals(OK.getStatusCode(), noResultwithOneRoleResponse.getStatusCode()); + + // Call as normal user with multiple valid roles and no results + Response noResultWithMultipleRoleResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L, 6L))); + assertEquals(prettyPrintError("myDataFinder.error.result.roles.empty", Arrays.asList("Dataset Creator, Contributor")), noResultWithMultipleRoleResponse.prettyPrint()); + assertEquals(OK.getStatusCode(), noResultWithMultipleRoleResponse.getStatusCode()); + + // Call as normal user with one valid dataset role and one dataset result + Response createDataverseResponse = UtilIT.createRandomDataverse(normalUserApiToken); + createDataverseResponse.prettyPrint(); + String dataverseAlias = UtilIT.getAliasFromResponse(createDataverseResponse); + + Response createDatasetResponse = UtilIT.createRandomDatasetViaNativeApi(dataverseAlias, normalUserApiToken); + createDatasetResponse.prettyPrint(); + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDatasetResponse); + UtilIT.sleepForReindex(datasetId.toString(), normalUserApiToken, 4); + Response oneDatasetResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(6L))); + assertEquals(OK.getStatusCode(), oneDatasetResponse.getStatusCode()); + JsonPath jsonPathOneDataset = oneDatasetResponse.getBody().jsonPath(); + assertEquals(1, jsonPathOneDataset.getInt("data.total_count")); + assertEquals(datasetId, jsonPathOneDataset.getInt("data.items[0].entity_id")); + + // Call as normal user with one valid dataverse role and one dataverse result + UtilIT.grantRoleOnDataverse(dataverseAlias, DataverseRole.DS_CONTRIBUTOR.toString(), + "@" + normalUserUsername, superUserApiToken); + Response oneDataverseResponse = UtilIT.retrieveMyDataAsJsonString(normalUserApiToken, "", new ArrayList<>(Arrays.asList(5L))); + assertEquals(OK.getStatusCode(), oneDataverseResponse.getStatusCode()); + JsonPath jsonPathOneDataverse = oneDataverseResponse.getBody().jsonPath(); + assertEquals(1, jsonPathOneDataverse.getInt("data.total_count")); + assertEquals(dataverseAlias, jsonPathOneDataverse.getString("data.items[0].name")); + + // Clean up + Response deleteDatasetResponse = UtilIT.deleteDatasetViaNativeApi(datasetId, normalUserApiToken); + deleteDatasetResponse.prettyPrint(); + assertEquals(200, deleteDatasetResponse.getStatusCode()); + + Response deleteDataverseResponse = UtilIT.deleteDataverse(dataverseAlias, normalUserApiToken); + deleteDataverseResponse.prettyPrint(); + assertEquals(200, deleteDataverseResponse.getStatusCode()); + + Response deleteUserResponse = UtilIT.deleteUser(normalUserUsername); + deleteUserResponse.prettyPrint(); + assertEquals(200, deleteUserResponse.getStatusCode()); } private static String prettyPrintError(String resourceBundleKey, List params) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java index 1b2d7e9a431..e26064a24ef 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java @@ -731,6 +731,42 @@ public void testCreatePublishDestroyDataset() { } + @Test + public void testHideMetadataBlocksInDatasetVersionsAPI() { + + // Create user + String apiToken = UtilIT.createRandomUserGetToken(); + + // Create user with no permission + String apiTokenNoPerms = 
UtilIT.createRandomUserGetToken(); + + // Create Collection + String collectionAlias = UtilIT.createRandomCollectionGetAlias(apiToken); + + // Create Dataset + Response createDataset = UtilIT.createRandomDatasetViaNativeApi(collectionAlias, apiToken); + createDataset.then().assertThat() + .statusCode(CREATED.getStatusCode()); + + Integer datasetId = UtilIT.getDatasetIdFromResponse(createDataset); + String datasetPid = JsonPath.from(createDataset.asString()).getString("data.persistentId"); + + // Now check that the metadata is NOT shown when we ask the versions API to do so. + boolean excludeMetadata = true; + Response unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, true, excludeMetadata, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.metadataBlocks", equalTo(null)); + + // Now check that the metadata is shown when we ask the versions API to do so. + excludeMetadata = false; + unpublishedDraft = UtilIT.getDatasetVersion(datasetPid, DS_VERSION_DRAFT, apiToken, true, excludeMetadata, false); + unpublishedDraft.prettyPrint(); + unpublishedDraft.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.metadataBlocks", notNullValue()); + } /** * The apis (/api/datasets/{id}/versions and /api/datasets/{id}/versions/{vid} * are already called from other RestAssured tests, in this class and also in FilesIT. diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 13c4c30190b..bd798b931ad 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -14,6 +14,7 @@ import java.io.FileReader; import java.io.IOException; import java.nio.file.Paths; +import java.text.MessageFormat; import java.util.Arrays; import java.util.List; import java.util.logging.Logger; @@ -927,16 +928,18 @@ public void testListMetadataBlocks() { .body("data.size()", equalTo(1)) .body("data[0].name", is("citation")) .body("data[0].fields.title.displayOnCreate", equalTo(true)) - .body("data[0].fields.size()", is(10)) + .body("data[0].fields.size()", is(10)) // 28 - 18 child duplicates .body("data[0].fields.author.childFields.size()", is(4)); Response setMetadataBlocksResponse = UtilIT.setMetadataBlocks(dataverseAlias, Json.createArrayBuilder().add("citation").add("astrophysics"), apiToken); + setMetadataBlocksResponse.prettyPrint(); setMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); String[] testInputLevelNames = {"geographicCoverage", "country", "city", "notesText"}; boolean[] testRequiredInputLevels = {false, true, false, false}; boolean[] testIncludedInputLevels = {false, true, true, false}; Response updateDataverseInputLevelsResponse = UtilIT.updateDataverseInputLevels(dataverseAlias, testInputLevelNames, testRequiredInputLevels, testIncludedInputLevels, apiToken); + updateDataverseInputLevelsResponse.prettyPrint(); updateDataverseInputLevelsResponse.then().assertThat().statusCode(OK.getStatusCode()); // Dataverse not found @@ -947,6 +950,7 @@ public void testListMetadataBlocks() { String[] expectedAllMetadataBlockDisplayNames = {"Astronomy and Astrophysics Metadata", "Citation Metadata", "Geospatial Metadata"}; listMetadataBlocksResponse = UtilIT.listMetadataBlocks(dataverseAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint();
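For reference, the raw native-API request behind UtilIT.getDatasetVersion(..., excludeMetadata, ...) in testHideMetadataBlocksInDatasetVersionsAPI above; the excludeMetadataBlocks parameter name comes from this changeset's release note, and the endpoint form is a sketch:

```java
Response response = given()
        .header("X-Dataverse-key", apiToken)
        .queryParam("persistentId", datasetPid)
        .queryParam("excludeMetadataBlocks", true)
        .get("/api/datasets/:persistentId/versions/:draft");
response.then().assertThat()
        .statusCode(OK.getStatusCode())
        .body("data.metadataBlocks", equalTo(null));
```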
listMetadataBlocksResponse.then().assertThat().statusCode(OK.getStatusCode()); listMetadataBlocksResponse.then().assertThat() .statusCode(OK.getStatusCode()) @@ -1008,14 +1012,13 @@ public void testListMetadataBlocks() { // Since the included property of notesText is set to false, we should retrieve the total number of fields minus one int citationMetadataBlockIndex = geospatialMetadataBlockIndex == 0 ? 1 : 0; listMetadataBlocksResponse.then().assertThat() - .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(34)); + .body(String.format("data[%d].fields.size()", citationMetadataBlockIndex), equalTo(34)); // 79 minus 45 child duplicates // Since the included property of geographicCoverage is set to false, we should retrieve the total number of fields minus one listMetadataBlocksResponse.then().assertThat() .body(String.format("data[%d].fields.size()", geospatialMetadataBlockIndex), equalTo(2)); - - listMetadataBlocksResponse = UtilIT.getMetadataBlock("geospatial"); + listMetadataBlocksResponse = UtilIT.getMetadataBlock("geospatial"); String actualGeospatialMetadataField1 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].name")); String actualGeospatialMetadataField2 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['country'].name")); String actualGeospatialMetadataField3 = listMetadataBlocksResponse.then().extract().path(String.format("data.fields['geographicCoverage'].childFields['city'].name")); @@ -1346,20 +1349,31 @@ public void testUpdateDataverse() { String[] newFacetIds = new String[]{"contributorName"}; String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; + // Assert that the error is returned for having both MetadataBlockNames and inheritMetadataBlocksFromParent Response updateDataverseResponse = UtilIT.updateDataverse( - testDataverseAlias, - newAlias, - newName, - newAffiliation, - newDataverseType, - newContactEmails, - newInputLevelNames, - newFacetIds, - newMetadataBlockNames, - apiToken + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + null, newMetadataBlockNames, apiToken, + Boolean.TRUE, Boolean.TRUE + ); + updateDataverseResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "metadataBlockNames", "inheritMetadataBlocksFromParent"))); + + // Assert that the error is returned for having both facetIds and inheritFacetsFromParent + updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + newFacetIds, null, apiToken, + Boolean.TRUE, Boolean.TRUE ); + updateDataverseResponse.then().assertThat() + .statusCode(BAD_REQUEST.getStatusCode()) + .body("message", equalTo(MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.metadatablocks.error.containslistandinheritflag"), "facetIds", "inheritFacetsFromParent"))); // Assert dataverse properties are updated + updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, newInputLevelNames, + newFacetIds, newMetadataBlockNames, apiToken + ); updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); String actualDataverseAlias = 
updateDataverseResponse.then().extract().path("data.alias"); assertEquals(newAlias, actualDataverseAlias); @@ -1396,7 +1410,60 @@ public void testUpdateDataverse() { Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.prettyPrint(); + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + + + // Update the dataverse without including metadata blocks, facets, or input levels + // ignore the missing data so the metadata blocks, facets, and input levels are NOT deleted and inherited from the parent + updateDataverseResponse = UtilIT.updateDataverse( + newAlias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails, + null, + null, + null, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // Assert that the metadata blocks are untouched and NOT inherited from the parent + listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); + listMetadataBlocksResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(3)) + .body("data[0].name", equalTo(actualDataverseMetadataBlock1)) + .body("data[1].name", equalTo(actualDataverseMetadataBlock2)) + .body("data[2].name", equalTo(actualDataverseMetadataBlock3)); + // Assert that the dataverse should still have its input level(s) + listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + listDataverseInputLevelsResponse.prettyPrint(); + listDataverseInputLevelsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data[0].datasetFieldTypeName", equalTo("geographicCoverage")); + // Assert that the dataverse should still have its Facets + listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + listDataverseFacetsResponse.prettyPrint(); + listDataverseFacetsResponse + .then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.size()", equalTo(1)) + .body("data", hasItem("contributorName")); + // Update the dataverse without setting metadata blocks, facets, or input levels + // Do NOT ignore the missing data so the metadata blocks, facets, and input levels are deleted and inherited from the parent updateDataverseResponse = UtilIT.updateDataverse( newAlias, newAlias, @@ -1407,12 +1474,14 @@ public void testUpdateDataverse() { null, null, null, - apiToken + apiToken, + Boolean.TRUE, Boolean.TRUE ); updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); // Assert that the metadata blocks are inherited from the parent listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + listMetadataBlocksResponse.prettyPrint(); listMetadataBlocksResponse .then().assertThat() .statusCode(OK.getStatusCode()) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java index 5020e37edb8..f84c5ad1a20 100644 --- 
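The BAD_REQUEST assertions in testUpdateDataverse above reject bodies that combine an explicit list with its inherit flag. A sketch of the offending metadataBlocks shape, assembled the same way UtilIT builds it:

```java
// Rejected with dataverse.metadatablocks.error.containslistandinheritflag:
JsonObject metadataBlocks = Json.createObjectBuilder()
        .add("metadataBlockNames", Json.createArrayBuilder().add("citation"))
        .add("inheritMetadataBlocksFromParent", true) // conflicts with the explicit list
        .build();
```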
+++ b/src/test/java/edu/harvard/iq/dataverse/api/HarvestingClientsIT.java
@@ -268,7 +268,7 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte
         assertEquals("inActive", clientStatus, "Unexpected client status: "+clientStatus);
 
         // b) Confirm that it has actually succeeded:
-        assertEquals("SUCCESS", responseJsonPath.getString("data.lastResult"), "Last harvest not reported a success (took "+i+" seconds)");
+        assertTrue(responseJsonPath.getString("data.lastResult").contains("Completed"), "Last harvest not reported a success (took "+i+" seconds)");
         String harvestTimeStamp = responseJsonPath.getString("data.lastHarvest");
         assertNotNull(harvestTimeStamp);
@@ -288,6 +288,8 @@ private void harvestingClientRun(boolean allowHarvestingMissingCVV) throws Inte
         // Let's give the asynchronous indexing an extra sec. to finish:
         Thread.sleep(1000L);
 
+        // Requires the index-harvested-metadata-source feature flag to be enabled in order to search on the nickName;
+        // otherwise, the search must be performed with metadataSource:Harvested
         Response searchHarvestedDatasets = UtilIT.search("metadataSource:" + nickName, normalUserAPIKey);
         searchHarvestedDatasets.then().assertThat().statusCode(OK.getStatusCode());
         searchHarvestedDatasets.prettyPrint();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
index 3b0b56740eb..316ac579de4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/MetadataBlocksIT.java
@@ -44,8 +44,7 @@ void testListMetadataBlocks() {
 
         // returnDatasetFieldTypes=true
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(false, true);
-        int expectedNumberOfMetadataFields = 35;
-        listMetadataBlocksResponse.prettyPrint();
+        int expectedNumberOfMetadataFields = 35; // 80 minus 45 child duplicates
         listMetadataBlocksResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data[0].fields", not(equalTo(null)))
@@ -57,7 +56,7 @@ void testListMetadataBlocks() {
         // onlyDisplayedOnCreate=true and returnDatasetFieldTypes=true
         listMetadataBlocksResponse = UtilIT.listMetadataBlocks(true, true);
         listMetadataBlocksResponse.prettyPrint();
-        expectedNumberOfMetadataFields = 10;
+        expectedNumberOfMetadataFields = 10; // 28 minus 18 child duplicates
         listMetadataBlocksResponse.then().assertThat()
                 .statusCode(OK.getStatusCode())
                 .body("data[0].fields", not(equalTo(null)))
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
index d15fda3a1a1..7e0a4714b1f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/RolesIT.java
@@ -4,6 +4,7 @@
 import io.restassured.RestAssured;
 import io.restassured.path.json.JsonPath;
 import io.restassured.response.Response;
+import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
 import java.util.logging.Logger;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.junit.jupiter.api.Assertions.assertEquals;
@@ -69,7 +70,15 @@ public void testCreateDeleteRoles() {
         body = addBuiltinRoleResponse.getBody().asString();
         status = JsonPath.from(body).getString("status");
         assertEquals("OK", status);
-
+
+        Response createNoPermsUser = UtilIT.createRandomUser();
+        createNoPermsUser.prettyPrint();
+        String noPermsApiToken = UtilIT.getApiTokenFromResponse(createNoPermsUser);
+
+        Response noPermsResponse = UtilIT.viewDataverseRole("testRole", noPermsApiToken);
+        noPermsResponse.prettyPrint();
+        noPermsResponse.then().assertThat().statusCode(FORBIDDEN.getStatusCode());
+
         Response viewDataverseRoleResponse = UtilIT.viewDataverseRole("testRole", apiToken);
         viewDataverseRoleResponse.prettyPrint();
         body = viewDataverseRoleResponse.getBody().asString();
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 13554793108..6080e7f01ea 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -415,14 +415,13 @@ static Response createSubDataverse(String alias, String category, String apiToke
             objectBuilder.add("affiliation", affiliation);
         }
 
-        updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder);
+        updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, null, null, objectBuilder);
 
         JsonObject dvData = objectBuilder.build();
         return given()
                 .body(dvData.toString()).contentType(ContentType.JSON)
                 .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken);
     }
-
     static Response updateDataverse(String alias,
                                     String newAlias,
                                     String newName,
@@ -433,6 +432,23 @@ static Response updateDataverse(String alias,
                                     String[] newFacetIds,
                                     String[] newMetadataBlockNames,
                                     String apiToken) {
+
+        return updateDataverse(alias, newAlias, newName, newAffiliation, newDataverseType, newContactEmails,
+                newInputLevelNames, newFacetIds, newMetadataBlockNames, apiToken, null, null);
+    }
+
+    static Response updateDataverse(String alias,
+                                    String newAlias,
+                                    String newName,
+                                    String newAffiliation,
+                                    String newDataverseType,
+                                    String[] newContactEmails,
+                                    String[] newInputLevelNames,
+                                    String[] newFacetIds,
+                                    String[] newMetadataBlockNames,
+                                    String apiToken,
+                                    Boolean inheritMetadataBlocksFromParent,
+                                    Boolean inheritFacetsFromParent) {
         JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder();
         for(String contactEmail : newContactEmails) {
             contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail));
@@ -445,7 +461,8 @@ static Response updateDataverse(String alias,
                 .add("dataverseType", newDataverseType)
                 .add("affiliation", newAffiliation);
 
-        updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder);
+        updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames,
+                inheritMetadataBlocksFromParent, inheritFacetsFromParent, jsonBuilder);
 
         JsonObject dvData = jsonBuilder.build();
         return given()
@@ -456,6 +473,8 @@ static Response updateDataverse(String alias,
     private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames,
                                                                                   String[] facetIds,
                                                                                   String[] metadataBlockNames,
+                                                                                  Boolean inheritMetadataBlocksFromParent,
+                                                                                  Boolean inheritFacetsFromParent,
                                                                                   JsonObjectBuilder objectBuilder) {
         JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder();
 
@@ -478,6 +497,9 @@ private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(St
             }
             metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder);
         }
+        if (inheritMetadataBlocksFromParent != null) {
+            metadataBlocksObjectBuilder.add("inheritMetadataBlocksFromParent", inheritMetadataBlocksFromParent);
+        }
 
         if (facetIds != null) {
             JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder();
@@ -486,6 +508,9 @@ private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(St
             }
             metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder);
         }
+        if (inheritFacetsFromParent != null) {
+            metadataBlocksObjectBuilder.add("inheritFacetsFromParent", inheritFacetsFromParent);
+        }
 
         objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder);
     }
@@ -1623,6 +1648,9 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str
     }
 
     static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean excludeFiles, boolean includeDeaccessioned) {
+        return getDatasetVersion(persistentId, versionNumber, apiToken, excludeFiles, false, includeDeaccessioned);
+    }
+    static Response getDatasetVersion(String persistentId, String versionNumber, String apiToken, boolean excludeFiles, boolean excludeMetadataBlocks, boolean includeDeaccessioned) {
         return given()
                 .header(API_TOKEN_HTTP_HEADER, apiToken)
                 .queryParam("includeDeaccessioned", includeDeaccessioned)
@@ -1630,7 +1658,8 @@ static Response getDatasetVersion(String persistentId, String versionNumber, Str
                 + versionNumber
                 + "?persistentId="
                 + persistentId
-                + (excludeFiles ? "&excludeFiles=true" : ""));
+                + (excludeFiles ? "&excludeFiles=true" : "")
+                + (excludeMetadataBlocks ? "&excludeMetadataBlocks=true" : ""));
     }
 
     static Response compareDatasetVersions(String persistentId, String versionNumber1, String versionNumber2, String apiToken) {
         return given()
diff --git a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
index bacb231b4d5..89a8f8826ec 100644
--- a/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/pidproviders/PidUtilTest.java
@@ -500,4 +500,49 @@ public void testLegacyConfig() throws IOException {
         assertEquals(pid1String, pid2.asString());
         assertEquals("legacy", pid2.getProviderId());
     }
+
+    // Tests support for the legacy Perma provider - see #10516
+    @Test
+    @JvmSetting(key = JvmSettings.LEGACY_PERMALINK_BASEURL, value = "http://localhost:8080/")
+    public void testLegacyPermaConfig() throws IOException {
+        MockitoAnnotations.openMocks(this);
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Shoulder)).thenReturn("FK2");
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol)).thenReturn(PermaLinkPidProvider.PERMA_PROTOCOL);
+        Mockito.when(settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority)).thenReturn("PermaTest");
+
+        String protocol = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Protocol);
+        String authority = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Authority);
+        String shoulder = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.Shoulder);
+
+        // Code mirrors the relevant part of PidProviderFactoryBean
+        if (protocol != null && authority != null && shoulder != null) {
+            // This line is different than in PidProviderFactoryBean because here we've
+            // already added the unmanaged providers, so we can't look for null
+            if (!PidUtil.getPidProvider(protocol, authority, shoulder).canManagePID()) {
+                PidProvider legacy = null;
+                // Try to add a legacy provider
+                String identifierGenerationStyle = settingsServiceBean
+                        .getValueForKey(SettingsServiceBean.Key.IdentifierGenerationStyle, "random");
+                String dataFilePidFormat = settingsServiceBean.getValueForKey(SettingsServiceBean.Key.DataFilePIDFormat,
+                        "DEPENDENT");
+                String baseUrl = JvmSettings.LEGACY_PERMALINK_BASEURL.lookupOptional().orElse(SystemConfig.getDataverseSiteUrlStatic());
+                legacy = new PermaLinkPidProvider("legacy", "legacy", authority, shoulder,
+                        identifierGenerationStyle, dataFilePidFormat, "", "", baseUrl,
+                        PermaLinkPidProvider.SEPARATOR);
+                if (legacy != null) {
+                    // Not testing parts that require this bean
+                    legacy.setPidProviderServiceBean(null);
+                    PidUtil.addToProviderList(legacy);
+                }
+            } else {
+                System.out.println("Legacy PID provider settings found - ignored since a provider for the same protocol, authority, shoulder has been registered");
+            }
+
+        }
+        // Is a perma PID with the default "" separator recognized?
+        String pid1String = "perma:PermaTestFK2ABCDEF";
+        GlobalId pid2 = PidUtil.parseAsGlobalID(pid1String);
+        assertEquals(pid1String, pid2.asString());
+        assertEquals("legacy", pid2.getProviderId());
+    }
 }
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
index 8e24c546556..2b54a4b12cd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/IndexServiceBeanTest.java
@@ -1,18 +1,29 @@
 package edu.harvard.iq.dataverse.search;
 
-import edu.harvard.iq.dataverse.*;
+import edu.harvard.iq.dataverse.ControlledVocabularyValue;
+import edu.harvard.iq.dataverse.Dataset;
+import edu.harvard.iq.dataverse.DatasetField;
+import edu.harvard.iq.dataverse.DatasetFieldCompoundValue;
+import edu.harvard.iq.dataverse.DatasetFieldConstant;
+import edu.harvard.iq.dataverse.DatasetFieldServiceBean;
+import edu.harvard.iq.dataverse.DatasetFieldType;
+import edu.harvard.iq.dataverse.DatasetFieldValue;
+import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.DatasetVersionFilesServiceBean;
+import edu.harvard.iq.dataverse.DatasetVersionServiceBean;
+import edu.harvard.iq.dataverse.Dataverse;
 import edu.harvard.iq.dataverse.Dataverse.DataverseType;
+import edu.harvard.iq.dataverse.DataverseServiceBean;
+import edu.harvard.iq.dataverse.GlobalId;
+import edu.harvard.iq.dataverse.MetadataBlock;
 import edu.harvard.iq.dataverse.branding.BrandingUtil;
 import edu.harvard.iq.dataverse.dataset.DatasetType;
 import edu.harvard.iq.dataverse.mocks.MocksFactory;
 import edu.harvard.iq.dataverse.pidproviders.doi.AbstractDOIProvider;
-import edu.harvard.iq.dataverse.settings.JvmSettings;
 import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
 import edu.harvard.iq.dataverse.util.SystemConfig;
-import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
 import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.common.SolrInputDocument;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -23,11 +34,15 @@
 import org.mockito.junit.jupiter.MockitoExtension;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
 import java.util.logging.Logger;
 import java.util.stream.Collectors;
 
-import static org.junit.jupiter.api.Assertions.assertEquals;
 import static org.junit.jupiter.api.Assertions.assertTrue;
 
 @LocalJvmSettings
@@ -42,7 +57,7 @@ public class IndexServiceBeanTest {
     private SettingsServiceBean settingsService;
     @InjectMocks
     private SystemConfig systemConfig = new SystemConfig();
-    
+
     @BeforeEach
     public void setUp() {
         dataverse = MocksFactory.makeDataverse();
@@ -58,36 +73,6 @@ public void setUp() {
         Mockito.when(indexService.dataverseService.findRootDataverse()).thenReturn(dataverse);
     }
 
-    @Test
-    public void testInitWithDefaults() {
-        // given
-        String url = "http://localhost:8983/solr/collection1";
-
-        // when
-        indexService.init();
-
-        // then
-        HttpSolrClient client = (HttpSolrClient) indexService.solrServer;
-        assertEquals(url, client.getBaseURL());
-    }
-
-
-    @Test
-    @JvmSetting(key = JvmSettings.SOLR_HOST, value = "foobar")
-    @JvmSetting(key = JvmSettings.SOLR_PORT, value = "1234")
-    @JvmSetting(key = JvmSettings.SOLR_CORE, value = "test")
-    void testInitWithConfig() {
-        // given
-        String url = "http://foobar:1234/solr/test";
-
-        // when
-        indexService.init();
-
-        // then
-        HttpSolrClient client = (HttpSolrClient) indexService.solrServer;
-        assertEquals(url, client.getBaseURL());
-    }
 
     @Test
     public void TestIndexing() throws SolrServerException, IOException {
@@ -129,6 +114,7 @@ public void testValidateBoundingBox() throws SolrServerException, IOException {
         assertTrue(!doc.get().containsKey("geolocation"));
         assertTrue(!doc.get().containsKey("boundingBox"));
     }
+
     private DatasetField constructBoundingBoxValue(String datasetFieldTypeName, String value) {
         DatasetField retVal = new DatasetField();
         retVal.setDatasetFieldType(new DatasetFieldType(datasetFieldTypeName, DatasetFieldType.FieldType.TEXT, false));
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientIndexServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientIndexServiceTest.java
new file mode 100644
index 00000000000..d3d68fa5f6a
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientIndexServiceTest.java
@@ -0,0 +1,72 @@
+package edu.harvard.iq.dataverse.search;
+
+import edu.harvard.iq.dataverse.settings.JvmSettings;
+import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.SystemConfig;
+import edu.harvard.iq.dataverse.util.testing.JvmSetting;
+import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.ConcurrentUpdateHttp2SolrClient;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
+@LocalJvmSettings
+@ExtendWith(MockitoExtension.class)
+class SolrClientIndexServiceTest {
+
+    @Mock
+    SettingsServiceBean settingsServiceBean;
+
+    @InjectMocks
+    SystemConfig systemConfig;
+
+    SolrClientIndexService clientService = new SolrClientIndexService();
+
+    @BeforeEach
+    void setUp() {
+        clientService.systemConfig = systemConfig;
+    }
+
+    @Test
+    void testInitWithDefaults() {
+        // given
+        String url = "http://localhost:8983/solr/collection1";
+
+        // when
+        clientService.init();
+
+        // then
+        SolrClient client = clientService.getSolrClient();
+        assertNotNull(client);
+        assertInstanceOf(ConcurrentUpdateHttp2SolrClient.class, client);
+        assertEquals(url, clientService.getSolrUrl());
+    }
+
+    @Test
+    @JvmSetting(key = JvmSettings.SOLR_HOST, value = "foobar")
+    @JvmSetting(key = JvmSettings.SOLR_PORT, value = "1234")
+    @JvmSetting(key = JvmSettings.SOLR_CORE, value = "test")
+    void testInitWithConfig() {
+        // given
+        String url = "http://foobar:1234/solr/test";
+
+        // when
+        clientService.init();
+
+        // then
+        SolrClient client = clientService.getSolrClient();
+        assertNotNull(client);
+        assertInstanceOf(ConcurrentUpdateHttp2SolrClient.class, client);
+        assertEquals(url, clientService.getSolrUrl());
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
index 72eafcd763c..13cea4151ff 100644
--- a/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/search/SolrClientServiceTest.java
@@ -5,7 +5,9 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import edu.harvard.iq.dataverse.util.testing.JvmSetting;
 import edu.harvard.iq.dataverse.util.testing.LocalJvmSettings;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.Http2SolrClient;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
@@ -14,35 +16,40 @@
 import org.mockito.junit.jupiter.MockitoExtension;
 
 import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertInstanceOf;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+
 
 @LocalJvmSettings
 @ExtendWith(MockitoExtension.class)
 class SolrClientServiceTest {
-    
+
     @Mock
     SettingsServiceBean settingsServiceBean;
     @InjectMocks
     SystemConfig systemConfig;
 
     SolrClientService clientService = new SolrClientService();
-    
+
     @BeforeEach
    void setUp() {
         clientService.systemConfig = systemConfig;
     }
-    
+
     @Test
     void testInitWithDefaults() {
         // given
         String url = "http://localhost:8983/solr/collection1";
-        
+
         // when
         clientService.init();
-        
+
         // then
-        HttpSolrClient client = (HttpSolrClient) clientService.getSolrClient();
-        assertEquals(url, client.getBaseURL());
+        SolrClient client = clientService.getSolrClient();
+        assertNotNull(client);
+        assertInstanceOf(Http2SolrClient.class, client);
+        assertEquals(url, clientService.getSolrUrl());
     }
-    
+
     @Test
     @JvmSetting(key = JvmSettings.SOLR_HOST, value = "foobar")
     @JvmSetting(key = JvmSettings.SOLR_PORT, value = "1234")
@@ -50,12 +57,14 @@ void testInitWithDefaults() {
     void testInitWithConfig() {
         // given
         String url = "http://foobar:1234/solr/test";
-        
+
        // when
         clientService.init();
-        
+
         // then
-        HttpSolrClient client = (HttpSolrClient) clientService.getSolrClient();
-        assertEquals(url, client.getBaseURL());
+        SolrClient client = clientService.getSolrClient();
+        assertNotNull(client);
+        assertInstanceOf(Http2SolrClient.class, client);
+        assertEquals(url, clientService.getSolrUrl());
     }
 }
\ No newline at end of file
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
index 7ec8e0b25f3..1987307637c 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonPrinterTest.java
@@ -25,6 +25,7 @@
 import jakarta.json.JsonString;
 
 import edu.harvard.iq.dataverse.util.BundleUtil;
+import org.assertj.core.util.Lists;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.BeforeEach;
@@ -268,6 +269,54 @@ public void testDatasetContactWithPrivacy() {
 
     }
 
+    @Test
+    public void testDatasetFieldTypesWithChildren() {
+        MetadataBlock block = new MetadataBlock();
+        block.setId(0L);
+        block.setName("citation");
+        long id = 0L;
+        // Create the datasetFieldTypes
+        List<DatasetFieldType> datasetFieldTypes = new ArrayList<>();
+        for (int i = 0; i < 10; i++) {
+            DatasetFieldType dft = new DatasetFieldType();
+            dft.setId(id++);
+            dft.setDisplayOrder(i);
+            dft.setMetadataBlock(block);
+            dft.setFieldType(FieldType.TEXT);
+            dft.setName("subType" + dft.getId());
+            dft.setTitle(dft.getName());
+            dft.setChildDatasetFieldTypes(Lists.emptyList());
+            datasetFieldTypes.add(dft);
+        }
+        // Add DatasetFieldTypes as children of other DatasetFieldTypes to test the suppression of duplicate data:
+        // add 3 and 4 as children of 2
+        datasetFieldTypes.get(3).setParentDatasetFieldType(datasetFieldTypes.get(2));
+        datasetFieldTypes.get(4).setParentDatasetFieldType(datasetFieldTypes.get(2));
+        datasetFieldTypes.get(2).setChildDatasetFieldTypes(List.of(datasetFieldTypes.get(3), datasetFieldTypes.get(4)));
+        // add 6 as a child of 9
+        datasetFieldTypes.get(6).setParentDatasetFieldType(datasetFieldTypes.get(9));
+        datasetFieldTypes.get(9).setChildDatasetFieldTypes(List.of(datasetFieldTypes.get(6)));
+
+        block.setDatasetFieldTypes(datasetFieldTypes);
+
+        DatasetFieldServiceBean nullDFServiceBean = null;
+        JsonPrinter.injectSettingsService(new MockSettingsSvc(), nullDFServiceBean);
+
+        JsonObject jsonObject = JsonPrinter.json(block).build();
+        assertNotNull(jsonObject);
+
+        System.out.println("json: " + JsonUtil.prettyPrint(jsonObject.toString()));
+        assertEquals("subType2 subType3", jsonObject.getJsonObject("fields").getJsonObject("subType2")
+                .getJsonObject("childFields").getJsonObject("subType3").getString("displayName"));
+        assertEquals("subType2 subType4", jsonObject.getJsonObject("fields").getJsonObject("subType2")
+                .getJsonObject("childFields").getJsonObject("subType4").getString("displayName"));
+        assertEquals("subType9 subType6", jsonObject.getJsonObject("fields").getJsonObject("subType9")
+                .getJsonObject("childFields").getJsonObject("subType6").getString("displayName"));
+        assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType3"));
+        assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType4"));
+        assertNull(jsonObject.getJsonObject("fields").getJsonObject("subType6"));
+    }
+
     @Test
     public void testDataversePrinter() {
         Dataverse dataverse = new Dataverse();