diff --git a/.gitignore b/.gitignore
index a9733538f7c..514f82116de 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,6 +34,7 @@ oauth-credentials.md
/src/main/webapp/oauth2/newAccount.html
scripts/api/setup-all.sh*
scripts/api/setup-all.*.log
+src/main/resources/edu/harvard/iq/dataverse/openapi/
# ctags generated tag file
tags
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index b0864a0c55f..00000000000
--- a/Dockerfile
+++ /dev/null
@@ -1 +0,0 @@
-# See http://guides.dataverse.org/en/latest/developers/containers.html
diff --git a/README.md b/README.md
index 651d0352dec..77720453d5f 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@ Dataverse is an [open source][] software platform for sharing, finding, citing,
We maintain a demo site at [demo.dataverse.org][] which you are welcome to use for testing and evaluating Dataverse.
-To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][].
+To install Dataverse, please see our [Installation Guide][] which will prompt you to download our [latest release][]. Docker users should consult the [Container Guide][].
To discuss Dataverse with the community, please join our [mailing list][], participate in a [community call][], chat with us at [chat.dataverse.org][], or attend our annual [Dataverse Community Meeting][].
@@ -28,6 +28,7 @@ Dataverse is a trademark of President and Fellows of Harvard College and is regi
[Dataverse community]: https://dataverse.org/developers
[Installation Guide]: https://guides.dataverse.org/en/latest/installation/index.html
[latest release]: https://github.com/IQSS/dataverse/releases
+[Container Guide]: https://guides.dataverse.org/en/latest/container/index.html
[features]: https://dataverse.org/software-features
[project board]: https://github.com/orgs/IQSS/projects/34
[roadmap]: https://www.iq.harvard.edu/roadmap-dataverse-project
diff --git a/conf/solr/9.3.0/solrconfig.xml b/conf/solr/9.3.0/solrconfig.xml
index 36ed4f23390..34386375fe1 100644
--- a/conf/solr/9.3.0/solrconfig.xml
+++ b/conf/solr/9.3.0/solrconfig.xml
@@ -290,7 +290,7 @@
have some sort of hard autoCommit to limit the log size.
-->
- ${solr.autoCommit.maxTime:15000}
+ ${solr.autoCommit.maxTime:30000}
false
@@ -301,7 +301,7 @@
-->
- ${solr.autoSoftCommit.maxTime:-1}
+ ${solr.autoSoftCommit.maxTime:1000}
+ ${project.build.outputDirectory}/META-INF
+ process-classes
+
+ ${openapi.outputDirectory}
+ openapi
+ ${openapi.infoTitle}
+ ${openapi.infoVersion}
+ ${openapi.infoDescription}
+ CLASS_METHOD
+ edu.harvard.iq.dataverse
+ true
+
+
+
+
@@ -1087,4 +1122,4 @@
-
+
\ No newline at end of file
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
index 2686584f307..dab0ff43fcf 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetServiceBean.java
@@ -19,8 +19,6 @@
import edu.harvard.iq.dataverse.export.ExportService;
import edu.harvard.iq.dataverse.globus.GlobusServiceBean;
import edu.harvard.iq.dataverse.harvest.server.OAIRecordServiceBean;
-import edu.harvard.iq.dataverse.pidproviders.PidProvider;
-import edu.harvard.iq.dataverse.pidproviders.PidUtil;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
@@ -41,11 +39,10 @@
import jakarta.ejb.TransactionAttributeType;
import jakarta.inject.Named;
import jakarta.persistence.EntityManager;
-import jakarta.persistence.LockModeType;
import jakarta.persistence.NoResultException;
+import jakarta.persistence.NonUniqueResultException;
import jakarta.persistence.PersistenceContext;
import jakarta.persistence.Query;
-import jakarta.persistence.StoredProcedureQuery;
import jakarta.persistence.TypedQuery;
import org.apache.commons.lang3.StringUtils;
@@ -115,28 +112,32 @@ public Dataset find(Object pk) {
* @return a dataset with pre-fetched file objects
*/
public Dataset findDeep(Object pk) {
- return (Dataset) em.createNamedQuery("Dataset.findById")
- .setParameter("id", pk)
- // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files
- .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest")
- .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset")
- .setHint("eclipselink.left-join-fetch", "o.files.dataTables")
- .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles")
- .setHint("eclipselink.left-join-fetch", "o.files.ingestReports")
- .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags")
- .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas")
- .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories")
- .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.varGroups")
- //.setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses
- .setHint("eclipselink.left-join-fetch", "o.files.embargo")
- .setHint("eclipselink.left-join-fetch", "o.files.retention")
- .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests")
- .setHint("eclipselink.left-join-fetch", "o.files.owner")
- .setHint("eclipselink.left-join-fetch", "o.files.releaseUser")
- .setHint("eclipselink.left-join-fetch", "o.files.creator")
- .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers")
- .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments")
- .getSingleResult();
+ try {
+ return (Dataset) em.createNamedQuery("Dataset.findById")
+ .setParameter("id", pk)
+ // Optimization hints: retrieve all data in one query; this prevents point queries when iterating over the files
+ .setHint("eclipselink.left-join-fetch", "o.files.ingestRequest")
+ .setHint("eclipselink.left-join-fetch", "o.files.thumbnailForDataset")
+ .setHint("eclipselink.left-join-fetch", "o.files.dataTables")
+ .setHint("eclipselink.left-join-fetch", "o.files.auxiliaryFiles")
+ .setHint("eclipselink.left-join-fetch", "o.files.ingestReports")
+ .setHint("eclipselink.left-join-fetch", "o.files.dataFileTags")
+ .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas")
+ .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.fileCategories")
+ .setHint("eclipselink.left-join-fetch", "o.files.fileMetadatas.varGroups")
+ //.setHint("eclipselink.left-join-fetch", "o.files.guestbookResponses")
+ .setHint("eclipselink.left-join-fetch", "o.files.embargo")
+ .setHint("eclipselink.left-join-fetch", "o.files.retention")
+ .setHint("eclipselink.left-join-fetch", "o.files.fileAccessRequests")
+ .setHint("eclipselink.left-join-fetch", "o.files.owner")
+ .setHint("eclipselink.left-join-fetch", "o.files.releaseUser")
+ .setHint("eclipselink.left-join-fetch", "o.files.creator")
+ .setHint("eclipselink.left-join-fetch", "o.files.alternativePersistentIndentifiers")
+ .setHint("eclipselink.left-join-fetch", "o.files.roleAssignments")
+ .getSingleResult();
+ } catch (NoResultException | NonUniqueResultException ex) {
+ return null;
+ }
}
public List findByOwnerId(Long ownerId) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index 9889d23cf55..afede00f3eb 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -522,10 +522,9 @@ public String ingestFile() throws CommandException{
return null;
}
- DataFile dataFile = fileMetadata.getDataFile();
- editDataset = dataFile.getOwner();
+ editDataset = file.getOwner();
- if (dataFile.isTabularData()) {
+ if (file.isTabularData()) {
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.alreadyIngestedWarning"));
return null;
}
@@ -537,25 +536,25 @@ public String ingestFile() throws CommandException{
return null;
}
- if (!FileUtil.canIngestAsTabular(dataFile)) {
+ if (!FileUtil.canIngestAsTabular(file)) {
JH.addMessage(FacesMessage.SEVERITY_WARN, BundleUtil.getStringFromBundle("file.ingest.cantIngestFileWarning"));
return null;
}
- dataFile.SetIngestScheduled();
+ file.SetIngestScheduled();
- if (dataFile.getIngestRequest() == null) {
- dataFile.setIngestRequest(new IngestRequest(dataFile));
+ if (file.getIngestRequest() == null) {
+ file.setIngestRequest(new IngestRequest(file));
}
- dataFile.getIngestRequest().setForceTypeCheck(true);
+ file.getIngestRequest().setForceTypeCheck(true);
// update the datafile, to save the newIngest request in the database:
datafileService.save(file);
// queue the data ingest job for asynchronous execution:
- String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(dataFile)), (AuthenticatedUser) session.getUser());
+ String status = ingestService.startIngestJobs(editDataset.getId(), new ArrayList<>(Arrays.asList(file)), (AuthenticatedUser) session.getUser());
if (!StringUtil.isEmpty(status)) {
// This most likely indicates some sort of a problem (for example,
@@ -565,9 +564,9 @@ public String ingestFile() throws CommandException{
// successfully gone through the process of trying to schedule the
// ingest job...
- logger.warning("Ingest Status for file: " + dataFile.getId() + " : " + status);
+ logger.warning("Ingest Status for file: " + file.getId() + " : " + status);
}
- logger.fine("File: " + dataFile.getId() + " ingest queued");
+ logger.fine("File: " + file.getId() + " ingest queued");
init();
JsfHelper.addInfoMessage(BundleUtil.getStringFromBundle("file.ingest.ingestQueued"));
diff --git a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
index 5a522eb7e45..46941c8b5b6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
+++ b/src/main/java/edu/harvard/iq/dataverse/SendFeedbackDialog.java
@@ -129,6 +129,10 @@ public void setUserSum(Long userSum) {
}
public String getMessageTo() {
+ if (op1 == null || op2 == null) {
+ // Fix for 403 error page: initUserInput() has not been called before this getter is invoked
+ initUserInput(null);
+ }
if (feedbackTarget == null) {
return BrandingUtil.getSupportTeamName(systemAddress);
} else if (feedbackTarget.isInstanceofDataverse()) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index e95500426c0..00da4990996 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -130,6 +130,14 @@
import jakarta.ws.rs.core.MediaType;
import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
import static jakarta.ws.rs.core.Response.Status.UNAUTHORIZED;
+
+import org.eclipse.microprofile.openapi.annotations.Operation;
+import org.eclipse.microprofile.openapi.annotations.media.Content;
+import org.eclipse.microprofile.openapi.annotations.media.Schema;
+import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
+import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
+import org.eclipse.microprofile.openapi.annotations.responses.APIResponses;
+import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -1248,6 +1256,20 @@ private String getWebappImageResource(String imageName) {
@AuthRequired
@Path("datafile/{fileId}/auxiliary/{formatTag}/{formatVersion}")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Save auxiliary file with version",
+ description = "Saves an auxiliary file")
+ @APIResponses(value = {
+ @APIResponse(responseCode = "200",
+ description = "File saved response"),
+ @APIResponse(responseCode = "403",
+ description = "User not authorized to edit the dataset."),
+ @APIResponse(responseCode = "400",
+ description = "File not found based on id.")
+ })
+ @Tag(name = "saveAuxiliaryFileWithVersion",
+ description = "Save Auxiliary File With Version")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response saveAuxiliaryFileWithVersion(@Context ContainerRequestContext crc,
@PathParam("fileId") Long fileId,
@PathParam("formatTag") String formatTag,
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
index 802904b5173..154fa2350bd 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Admin.java
@@ -201,7 +201,7 @@ public Response putSetting(@PathParam("name") String name, String content) {
@Path("settings/{name}/lang/{lang}")
@PUT
- public Response putSetting(@PathParam("name") String name, @PathParam("lang") String lang, String content) {
+ public Response putSettingLang(@PathParam("name") String name, @PathParam("lang") String lang, String content) {
Setting s = settingsSvc.set(name, lang, content);
return ok("Setting " + name + " - " + lang + " - added.");
}
@@ -224,7 +224,7 @@ public Response deleteSetting(@PathParam("name") String name) {
@Path("settings/{name}/lang/{lang}")
@DELETE
- public Response deleteSetting(@PathParam("name") String name, @PathParam("lang") String lang) {
+ public Response deleteSettingLang(@PathParam("name") String name, @PathParam("lang") String lang) {
settingsSvc.delete(name, lang);
return ok("Setting " + name + " - " + lang + " deleted.");
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
index 50862bc0d35..ba99cf33c5b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/BuiltinUsers.java
@@ -119,7 +119,7 @@ public Response create(BuiltinUser user, @PathParam("password") String password,
*/
@POST
@Path("{password}/{key}/{sendEmailNotification}")
- public Response create(BuiltinUser user, @PathParam("password") String password, @PathParam("key") String key, @PathParam("sendEmailNotification") Boolean sendEmailNotification) {
+ public Response createWithNotification(BuiltinUser user, @PathParam("password") String password, @PathParam("key") String key, @PathParam("sendEmailNotification") Boolean sendEmailNotification) {
return internalSave(user, password, key, sendEmailNotification);
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
index 1befb3869c3..fc0afc562fc 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Datasets.java
@@ -1,6 +1,7 @@
package edu.harvard.iq.dataverse.api;
import com.amazonaws.services.s3.model.PartETag;
+
import edu.harvard.iq.dataverse.*;
import edu.harvard.iq.dataverse.DatasetLock.Reason;
import edu.harvard.iq.dataverse.actionlogging.ActionLogRecord;
@@ -66,6 +67,12 @@
import jakarta.ws.rs.core.*;
import jakarta.ws.rs.core.Response.Status;
import org.apache.commons.lang3.StringUtils;
+import org.eclipse.microprofile.openapi.annotations.Operation;
+import org.eclipse.microprofile.openapi.annotations.media.Content;
+import org.eclipse.microprofile.openapi.annotations.media.Schema;
+import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
+import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
+import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -796,7 +803,7 @@ public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @
@AuthRequired
@Path("{id}/metadata")
@Produces("application/ld+json, application/json-ld")
- public Response getVersionJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
+ public Response getJsonLDMetadata(@Context ContainerRequestContext crc, @PathParam("id") String id, @Context UriInfo uriInfo, @Context HttpHeaders headers) {
return getVersionJsonLDMetadata(crc, id, DS_VERSION_LATEST, uriInfo, headers);
}
@@ -2261,6 +2268,14 @@ public Response setDataFileAsThumbnail(@Context ContainerRequestContext crc, @Pa
@AuthRequired
@Path("{id}/thumbnail")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Uploads a logo for a dataset",
+ description = "Uploads a logo for a dataset")
+ @APIResponse(responseCode = "200",
+ description = "Dataset logo uploaded successfully")
+ @Tag(name = "uploadDatasetLogo",
+ description = "Uploads a logo for a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response uploadDatasetLogo(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied, @FormDataParam("file") InputStream inputStream) {
try {
DatasetThumbnail datasetThumbnail = execCommand(new UpdateDatasetThumbnailCommand(createDataverseRequest(getRequestUser(crc)), findDatasetOrDie(idSupplied), UpdateDatasetThumbnailCommand.UserIntent.setNonDatasetFileAsThumbnail, null, inputStream));
@@ -2733,6 +2748,14 @@ public Response completeMPUpload(@Context ContainerRequestContext crc, String pa
@AuthRequired
@Path("{id}/add")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Uploads a file for a dataset",
+ description = "Uploads a file for a dataset")
+ @APIResponse(responseCode = "200",
+ description = "File uploaded successfully to dataset")
+ @Tag(name = "addFileToDataset",
+ description = "Uploads a file for a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addFileToDataset(@Context ContainerRequestContext crc,
@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData,
@@ -3958,6 +3981,14 @@ public Response requestGlobusUpload(@Context ContainerRequestContext crc, @PathP
@AuthRequired
@Path("{id}/addGlobusFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Uploads a Globus file for a dataset",
+ description = "Uploads a Globus file for a dataset")
+ @APIResponse(responseCode = "200",
+ description = "Globus file uploaded successfully to dataset")
+ @Tag(name = "addGlobusFilesToDataset",
+ description = "Uploads a Globus file for a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addGlobusFilesToDataset(@Context ContainerRequestContext crc,
@PathParam("id") String datasetId,
@FormDataParam("jsonData") String jsonData,
@@ -4340,6 +4371,14 @@ public Response monitorGlobusDownload(@Context ContainerRequestContext crc, @Pat
@AuthRequired
@Path("{id}/addFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Uploads a set of files to a dataset",
+ description = "Uploads a set of files to a dataset")
+ @APIResponse(responseCode = "200",
+ description = "Files uploaded successfully to dataset")
+ @Tag(name = "addFilesToDataset",
+ description = "Uploads a set of files to a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData) {
@@ -4407,6 +4446,14 @@ public Response addFilesToDataset(@Context ContainerRequestContext crc, @PathPar
@AuthRequired
@Path("{id}/replaceFiles")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Replace a set of files to a dataset",
+ description = "Replace a set of files to a dataset")
+ @APIResponse(responseCode = "200",
+ description = "Files replaced successfully to dataset")
+ @Tag(name = "replaceFilesInDataset",
+ description = "Replace a set of files to a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response replaceFilesInDataset(@Context ContainerRequestContext crc,
@PathParam("id") String idSupplied,
@FormDataParam("jsonData") String jsonData) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Files.java b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
index 2d48322c90e..d786aab35a8 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Files.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Files.java
@@ -64,6 +64,13 @@
import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
import jakarta.ws.rs.core.UriInfo;
+
+import org.eclipse.microprofile.openapi.annotations.Operation;
+import org.eclipse.microprofile.openapi.annotations.media.Content;
+import org.eclipse.microprofile.openapi.annotations.media.Schema;
+import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
+import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
+import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -176,6 +183,14 @@ public Response restrictFileInDataset(@Context ContainerRequestContext crc, @Pat
@AuthRequired
@Path("{id}/replace")
@Consumes(MediaType.MULTIPART_FORM_DATA)
+ @Produces("application/json")
+ @Operation(summary = "Replace a file on a dataset",
+ description = "Replace a file to a dataset")
+ @APIResponse(responseCode = "200",
+ description = "File replaced successfully on the dataset")
+ @Tag(name = "replaceFilesInDataset",
+ description = "Replace a file to a dataset")
+ @RequestBody(content = @Content(mediaType = MediaType.MULTIPART_FORM_DATA))
public Response replaceFileInDataset(
@Context ContainerRequestContext crc,
@PathParam("id") String fileIdOrPersistentId,
@@ -497,7 +512,7 @@ public Response getFileData(@Context ContainerRequestContext crc,
@GET
@AuthRequired
@Path("{id}/versions/{datasetVersionId}")
- public Response getFileData(@Context ContainerRequestContext crc,
+ public Response getFileDataForVersion(@Context ContainerRequestContext crc,
@PathParam("id") String fileIdOrPersistentId,
@PathParam("datasetVersionId") String datasetVersionId,
@QueryParam("includeDeaccessioned") boolean includeDeaccessioned,
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
index d56a787c7ff..ed996b8ecf9 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Groups.java
@@ -88,8 +88,8 @@ public Response postIpGroup( JsonObject dto ){
* that group from being created.
*/
@PUT
- @Path("ip/{groupName}")
- public Response putIpGroups( @PathParam("groupName") String groupName, JsonObject dto ){
+ @Path("ip/{group}")
+ public Response putIpGroups( @PathParam("group") String groupName, JsonObject dto ){
try {
if ( groupName == null || groupName.trim().isEmpty() ) {
return badRequest("Group name cannot be empty");
@@ -118,8 +118,8 @@ public Response listIpGroups() {
}
@GET
- @Path("ip/{groupIdtf}")
- public Response getIpGroup( @PathParam("groupIdtf") String groupIdtf ) {
+ @Path("ip/{group}")
+ public Response getIpGroup( @PathParam("group") String groupIdtf ) {
IpGroup grp;
if ( isNumeric(groupIdtf) ) {
grp = ipGroupPrv.get( Long.parseLong(groupIdtf) );
@@ -131,8 +131,8 @@ public Response getIpGroup( @PathParam("groupIdtf") String groupIdtf ) {
}
@DELETE
- @Path("ip/{groupIdtf}")
- public Response deleteIpGroup( @PathParam("groupIdtf") String groupIdtf ) {
+ @Path("ip/{group}")
+ public Response deleteIpGroup( @PathParam("group") String groupIdtf ) {
IpGroup grp;
if ( isNumeric(groupIdtf) ) {
grp = ipGroupPrv.get( Long.parseLong(groupIdtf) );
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Info.java b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
index 40ce6cd25b7..257519677d3 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Info.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Info.java
@@ -1,16 +1,35 @@
package edu.harvard.iq.dataverse.api;
+import java.io.FileInputStream;
+import java.io.InputStream;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.Arrays;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import jakarta.ws.rs.Produces;
+import org.apache.commons.io.IOUtils;
+
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
+import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import jakarta.ejb.EJB;
import jakarta.json.Json;
import jakarta.json.JsonValue;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
+import jakarta.ws.rs.PathParam;
+import jakarta.ws.rs.core.MediaType;
import jakarta.ws.rs.core.Response;
+import org.eclipse.microprofile.openapi.annotations.Operation;
+import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
+import org.eclipse.microprofile.openapi.annotations.tags.Tag;
@Path("info")
+@Tag(name = "info", description = "General information about the Dataverse installation.")
public class Info extends AbstractApiBean {
@EJB
@@ -19,6 +38,8 @@ public class Info extends AbstractApiBean {
@EJB
SystemConfig systemConfig;
+ private static final Logger logger = Logger.getLogger(Info.class.getCanonicalName());
+
@GET
@Path("settings/:DatasetPublishPopupCustomText")
public Response getDatasetPublishPopupCustomText() {
@@ -33,6 +54,9 @@ public Response getMaxEmbargoDurationInMonths() {
@GET
@Path("version")
+ @Operation(summary = "Get version and build information", description = "Get version and build information")
+ @APIResponse(responseCode = "200",
+ description = "Version and build information")
public Response getInfo() {
String versionStr = systemConfig.getVersion(true);
String[] comps = versionStr.split("build",2);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
index b9db44b2671..46747b50c29 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/TestApi.java
@@ -21,7 +21,7 @@ public class TestApi extends AbstractApiBean {
@GET
@Path("datasets/{id}/externalTools")
- public Response getExternalToolsforFile(@PathParam("id") String idSupplied, @QueryParam("type") String typeSupplied) {
+ public Response getDatasetExternalToolsforFile(@PathParam("id") String idSupplied, @QueryParam("type") String typeSupplied) {
ExternalTool.Type type;
try {
type = ExternalTool.Type.fromString(typeSupplied);
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Users.java b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
index 791fc7aa774..1f5430340c2 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Users.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Users.java
@@ -234,7 +234,7 @@ public Response getTraces(@Context ContainerRequestContext crc, @PathParam("iden
@AuthRequired
@Path("{identifier}/traces/{element}")
@Produces("text/csv, application/json")
- public Response getTraces(@Context ContainerRequestContext crc, @Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) {
+ public Response getTracesElement(@Context ContainerRequestContext crc, @Context Request req, @PathParam("identifier") String identifier, @PathParam("element") String element) {
try {
AuthenticatedUser userToQuery = authSvc.getAuthenticatedUser(identifier);
if(!elements.contains(element)) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
index 8d5024c1c14..15478aacff7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/WorkflowsAdmin.java
@@ -111,9 +111,9 @@ public Response deleteDefault(@PathParam("triggerType") String triggerType) {
}
}
- @Path("/{identifier}")
+ @Path("/{id}")
@GET
- public Response getWorkflow(@PathParam("identifier") String identifier ) {
+ public Response getWorkflow(@PathParam("id") String identifier ) {
try {
long idtf = Long.parseLong(identifier);
return workflows.getWorkflow(idtf)
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
index e4edb973cd9..121af765737 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AssignRoleCommand.java
@@ -3,7 +3,6 @@
*/
package edu.harvard.iq.dataverse.engine.command.impl;
-import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
@@ -18,6 +17,8 @@
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
@@ -68,11 +69,22 @@ public RoleAssignment execute(CommandContext ctxt) throws CommandException {
throw new IllegalCommandException("User " + user.getUserIdentifier() + " is deactivated and cannot be given a role.", this);
}
}
+ if(isExistingRole(ctxt)){
+ throw new IllegalCommandException(BundleUtil.getStringFromBundle("datasets.api.grant.role.assignee.has.role.error"), this);
+ }
// TODO make sure the role is defined on the dataverse.
RoleAssignment roleAssignment = new RoleAssignment(role, grantee, defPoint, privateUrlToken, anonymizedAccess);
return ctxt.roles().save(roleAssignment);
}
+ private boolean isExistingRole(CommandContext ctxt) {
+ return ctxt.roles()
+ .directRoleAssignments(grantee, defPoint)
+ .stream()
+ .map(RoleAssignment::getRole)
+ .anyMatch(it -> it.equals(role));
+ }
+
@Override
public Map> getRequiredPermissions() {
// for data file check permission on owning dataset
diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java
index 1ec51764d73..03f4dceef88 100644
--- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java
+++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/MergeInAccountCommand.java
@@ -14,7 +14,6 @@
import edu.harvard.iq.dataverse.UserNotification;
import edu.harvard.iq.dataverse.authorization.AuthenticatedUserLookup;
import edu.harvard.iq.dataverse.authorization.providers.builtin.BuiltinUser;
-import edu.harvard.iq.dataverse.authorization.providers.oauth2.OAuth2TokenData;
import edu.harvard.iq.dataverse.authorization.users.ApiToken;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.batch.util.LoggingUtil;
@@ -25,7 +24,6 @@
import edu.harvard.iq.dataverse.engine.command.RequiredPermissions;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
-import edu.harvard.iq.dataverse.passwordreset.PasswordResetData;
import edu.harvard.iq.dataverse.search.IndexResponse;
import edu.harvard.iq.dataverse.search.savedsearch.SavedSearch;
import edu.harvard.iq.dataverse.workflows.WorkflowComment;
@@ -177,6 +175,7 @@ protected void executeImpl(CommandContext ctxt) throws CommandException {
ctxt.em().createNativeQuery("Delete from OAuth2TokenData where user_id ="+consumedAU.getId()).executeUpdate();
+ ctxt.em().createNativeQuery("DELETE FROM explicitgroup_authenticateduser consumed USING explicitgroup_authenticateduser ongoing WHERE consumed.containedauthenticatedusers_id="+ongoingAU.getId()+" AND ongoing.containedauthenticatedusers_id="+consumedAU.getId()).executeUpdate();
ctxt.em().createNativeQuery("UPDATE explicitgroup_authenticateduser SET containedauthenticatedusers_id="+ongoingAU.getId()+" WHERE containedauthenticatedusers_id="+consumedAU.getId()).executeUpdate();
ctxt.actionLog().changeUserIdentifierInHistory(consumedAU.getIdentifier(), ongoingAU.getIdentifier());
diff --git a/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java b/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java
new file mode 100644
index 00000000000..6bd54916e0d
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/openapi/OpenApi.java
@@ -0,0 +1,101 @@
+package edu.harvard.iq.dataverse.openapi;
+
+import java.io.*;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.logging.*;
+
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.servlet.ServletException;
+import jakarta.servlet.annotation.WebServlet;
+import jakarta.servlet.http.*;
+import jakarta.ws.rs.core.*;
+import org.apache.commons.io.IOUtils;
+import edu.harvard.iq.dataverse.util.BundleUtil;
+
+/**
+ * Serves the OpenAPI definition of the Dataverse API in YAML (the default)
+ * or JSON, selected via the "format" query parameter and/or the Accept header.
+ */
+@WebServlet("/openapi")
+public class OpenApi extends HttpServlet {
+
+    // Log under this servlet's own class name (was Info.class, a copy/paste slip).
+    private static final Logger logger = Logger.getLogger(OpenApi.class.getCanonicalName());
+
+    private static final String YAML_FORMAT = "yaml";
+    private static final String JSON_FORMAT = "json";
+
+    /**
+     * Streams the bundled OpenAPI definition to the client. Responds with
+     * 400 if the Accept header and the format parameter conflict, 415 for
+     * unsupported formats, and 500 if the bundled definition cannot be read.
+     */
+    @Override
+    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+
+        String format = req.getParameter("format");
+        String accept = req.getHeader("Accept");
+
+        /*
+         * Check the Accept header first: a client accepting application/json
+         * but passing a conflicting format parameter gets BAD_REQUEST (400).
+         */
+        if (MediaType.APPLICATION_JSON.equals(accept)) {
+            if (format != null && !JSON_FORMAT.equals(format)) {
+                List<String> args = Arrays.asList(accept, format);
+                String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception.unaligned", args);
+                resp.sendError(Response.Status.BAD_REQUEST.getStatusCode(), bundleResponse);
+                return;
+            } else {
+                format = JSON_FORMAT;
+            }
+        }
+
+        /*
+         * Only JSON and YAML are supported, YAML being the default when no
+         * format is specified; anything else is UNSUPPORTED_MEDIA_TYPE (415).
+         * Locale.ROOT keeps the lowercasing locale-independent.
+         */
+        format = format == null ? YAML_FORMAT : format.toLowerCase(Locale.ROOT);
+
+        if (JSON_FORMAT.equals(format)) {
+            resp.setContentType(MediaType.APPLICATION_JSON_TYPE.toString());
+        } else if (YAML_FORMAT.equals(format)) {
+            resp.setContentType(MediaType.TEXT_PLAIN_TYPE.toString());
+        } else {
+            List<String> args = Arrays.asList(format);
+            String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception.invalid.format", args);
+
+            JsonObject errorResponse = Json.createObjectBuilder()
+                    .add("status", "ERROR")
+                    .add("code", HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE)
+                    .add("message", bundleResponse)
+                    .build();
+
+            resp.setContentType(MediaType.APPLICATION_JSON_TYPE.toString());
+            resp.setStatus(HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE);
+
+            PrintWriter responseWriter = resp.getWriter();
+            responseWriter.println(errorResponse.toString());
+            responseWriter.flush();
+            return;
+        }
+
+        // Serve the pre-generated definition bundled at /META-INF/openapi.<format>.
+        // try-with-resources closes the stream (the original leaked it); a missing
+        // resource (null URL) throws inside the try and is reported as a 500.
+        String baseFileName = "/META-INF/openapi." + format;
+        URL definitionResource = this.getClass().getClassLoader().getResource(baseFileName);
+        try (InputStream openapiDefinitionStream = definitionResource.openStream()) {
+            String content = IOUtils.toString(openapiDefinitionStream, StandardCharsets.UTF_8);
+            resp.getWriter().write(content);
+        } catch (Exception e) {
+            logger.log(Level.SEVERE, "OpenAPI Definition format not found " + format + ":" + e.getMessage(), e);
+            String bundleResponse = BundleUtil.getStringFromBundle("openapi.exception");
+            resp.sendError(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode(), bundleResponse);
+        }
+    }
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
index e61b93a741f..54d60ee89b1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/IndexServiceBean.java
@@ -12,6 +12,7 @@
import edu.harvard.iq.dataverse.datavariable.VariableMetadataUtil;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.harvest.client.HarvestingClient;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
import edu.harvard.iq.dataverse.settings.JvmSettings;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.FileUtil;
@@ -214,6 +215,9 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths)
solrInputDocument.addField(SearchFields.DATAVERSE_CATEGORY, dataverse.getIndexableCategoryName());
if (dataverse.isReleased()) {
solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, PUBLISHED_STRING);
+ if (FeatureFlags.ADD_PUBLICOBJECT_SOLR_FIELD.enabled()) {
+ solrInputDocument.addField(SearchFields.PUBLIC_OBJECT, true);
+ }
solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataverse.getPublicationDate());
} else {
solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, UNPUBLISHED_STRING);
@@ -308,7 +312,7 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths)
String status;
try {
if (dataverse.getId() != null) {
- solrClientService.getSolrClient().add(docs);
+ solrClientService.getSolrClient().add(docs, COMMIT_WITHIN);
} else {
logger.info("WARNING: indexing of a dataverse with no id attempted");
}
@@ -317,14 +321,6 @@ public Future indexDataverse(Dataverse dataverse, boolean processPaths)
logger.info(status);
return new AsyncResult<>(status);
}
- try {
- solrClientService.getSolrClient().commit();
- } catch (SolrServerException | IOException ex) {
- status = ex.toString();
- logger.info(status);
- return new AsyncResult<>(status);
- }
-
dvObjectService.updateContentIndexTime(dataverse);
IndexResponse indexResponse = solrIndexService.indexPermissionsForOneDvObject(dataverse);
String msg = "indexed dataverse " + dataverse.getId() + ":" + dataverse.getAlias() + ". Response from permission indexing: " + indexResponse.getMessage();
@@ -349,6 +345,7 @@ public void indexDatasetInNewTransaction(Long datasetId) { //Dataset dataset) {
private static final Map INDEXING_NOW = new ConcurrentHashMap<>();
// semaphore for async indexing
private static final Semaphore ASYNC_INDEX_SEMAPHORE = new Semaphore(JvmSettings.MAX_ASYNC_INDEXES.lookupOptional(Integer.class).orElse(4), true);
+ static final int COMMIT_WITHIN = 30000; //Same as current autoHardIndex time
@Inject
@Metric(name = "index_permit_wait_time", absolute = true, unit = MetricUnits.NANOSECONDS,
@@ -878,6 +875,9 @@ public SolrInputDocuments toSolrDocs(IndexableDataset indexableDataset, Set d
final SolrInputDocuments docs = toSolrDocs(indexableDataset, datafilesInDraftVersion);
try {
- solrClientService.getSolrClient().add(docs.getDocuments());
- solrClientService.getSolrClient().commit();
+ solrClientService.getSolrClient().add(docs.getDocuments(), COMMIT_WITHIN);
} catch (SolrServerException | IOException ex) {
if (ex.getCause() instanceof SolrServerException) {
throw new SolrServerException(ex);
@@ -1778,8 +1780,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc
sid.removeField(SearchFields.SUBTREE);
sid.addField(SearchFields.SUBTREE, paths);
- UpdateResponse addResponse = solrClientService.getSolrClient().add(sid);
- UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
+ UpdateResponse addResponse = solrClientService.getSolrClient().add(sid, COMMIT_WITHIN);
if (object.isInstanceofDataset()) {
for (DataFile df : dataset.getFiles()) {
solrQuery.setQuery(SearchUtil.constructQuery(SearchFields.ENTITY_ID, df.getId().toString()));
@@ -1792,8 +1793,7 @@ private void updatePathForExistingSolrDocs(DvObject object) throws SolrServerExc
}
sid.removeField(SearchFields.SUBTREE);
sid.addField(SearchFields.SUBTREE, paths);
- addResponse = solrClientService.getSolrClient().add(sid);
- commitResponse = solrClientService.getSolrClient().commit();
+ addResponse = solrClientService.getSolrClient().add(sid, COMMIT_WITHIN);
}
}
}
@@ -1835,12 +1835,7 @@ public String delete(Dataverse doomed) {
logger.fine("deleting Solr document for dataverse " + doomed.getId());
UpdateResponse updateResponse;
try {
- updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId());
- } catch (SolrServerException | IOException ex) {
- return ex.toString();
- }
- try {
- solrClientService.getSolrClient().commit();
+ updateResponse = solrClientService.getSolrClient().deleteById(solrDocIdentifierDataverse + doomed.getId(), COMMIT_WITHIN);
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
@@ -1860,12 +1855,7 @@ public String removeSolrDocFromIndex(String doomed) {
logger.fine("deleting Solr document: " + doomed);
UpdateResponse updateResponse;
try {
- updateResponse = solrClientService.getSolrClient().deleteById(doomed);
- } catch (SolrServerException | IOException ex) {
- return ex.toString();
- }
- try {
- solrClientService.getSolrClient().commit();
+ updateResponse = solrClientService.getSolrClient().deleteById(doomed, COMMIT_WITHIN);
} catch (SolrServerException | IOException ex) {
return ex.toString();
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
index 3ebc5b42fcb..02649cec68c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchFields.java
@@ -217,6 +217,15 @@ public class SearchFields {
public static final String DEFINITION_POINT_DVOBJECT_ID = "definitionPointDvObjectId";
public static final String DISCOVERABLE_BY = "discoverableBy";
+ /**
+ * publicObject_b is an experimental field tied to the
+ * avoid-expensive-solr-join feature flag. Rather than discoverableBy which
+ * is a field on permission documents, publicObject_b is a field on content
+ * documents (dvObjects). By indexing publicObject_b=true, we can let guests
+ * search on it, avoiding an expensive join for those (common) users.
+ */
+ public static final String PUBLIC_OBJECT = "publicObject_b";
+
/**
* i.e. "Unpublished", "Draft" (multivalued)
*/
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
index 42d61231f93..3d6a563da58 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SearchServiceBean.java
@@ -16,6 +16,7 @@
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
+import edu.harvard.iq.dataverse.settings.FeatureFlags;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.io.IOException;
@@ -1001,14 +1002,132 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
user = GuestUser.get();
}
+ AuthenticatedUser au = null;
+ Set groups;
+
+ if (user instanceof GuestUser) {
+ // Yes, GuestUser may be part of one or more groups; such as IP Groups.
+ groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
+ } else {
+ if (!(user instanceof AuthenticatedUser)) {
+ logger.severe("Should never reach here. A User must be an AuthenticatedUser or a Guest");
+ throw new IllegalStateException("A User must be an AuthenticatedUser or a Guest");
+ }
+
+ au = (AuthenticatedUser) user;
+
+ // ----------------------------------------------------
+ // (3) Is this a Super User?
+ // If so, they can see everything
+ // ----------------------------------------------------
+ if (au.isSuperuser()) {
+ // Somewhat dangerous because this user (a superuser) will be able
+ // to see everything in Solr with no regard to permissions. But it's
+ // been this way since Dataverse 4.0. So relax. :)
+
+ return dangerZoneNoSolrJoin;
+ }
+
+ // ----------------------------------------------------
+ // (4) User is logged in AND onlyDatatRelatedToMe == true
+ // Yes, give back everything -> the settings will be in
+ // the filterqueries given to search
+ // ----------------------------------------------------
+ if (onlyDatatRelatedToMe == true) {
+ if (systemConfig.myDataDoesNotUsePermissionDocs()) {
+ logger.fine("old 4.2 behavior: MyData is not using Solr permission docs");
+ return dangerZoneNoSolrJoin;
+ } else {
+ // fall-through
+ logger.fine("new post-4.2 behavior: MyData is using Solr permission docs");
+ }
+ }
+
+ // ----------------------------------------------------
+ // (5) Work with Authenticated User who is not a Superuser
+ // ----------------------------------------------------
+
+ groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
+ }
+
+ if (FeatureFlags.AVOID_EXPENSIVE_SOLR_JOIN.enabled()) {
+ /**
+ * Instead of doing a super expensive join, we will rely on the
+ * new boolean field PublicObject:true for public objects. This field
+ * is indexed on the content document itself, rather than a permission
+ * document. An additional join will be added only for any extra,
+ * more restricted groups that the user may be part of.
+ * **Note the experimental nature of this optimization**.
+ */
+ StringBuilder sb = new StringBuilder();
+ StringBuilder sbgroups = new StringBuilder();
+
+ // All users, guests and authenticated, should see all the
+ // documents marked as publicObject_b:true, at least:
+ sb.append(SearchFields.PUBLIC_OBJECT + ":" + true);
+
+ // One or more groups *may* also be available for this user. Once again,
+ // do note that Guest users may be part of some groups, such as
+ // IP groups.
+
+ int groupCounter = 0;
+
+ // An AuthenticatedUser should also be able to see all the content
+ // on which they have direct permissions:
+ if (au != null) {
+ groupCounter++;
+ sbgroups.append(IndexServiceBean.getGroupPerUserPrefix() + au.getId());
+ }
+
+ // In addition to the user referenced directly, we will also
+ // add joins on all the non-public groups that may exist for the
+ // user:
+ for (Group group : groups) {
+ String groupAlias = group.getAlias();
+ if (groupAlias != null && !groupAlias.isEmpty() && !groupAlias.startsWith("builtIn")) {
+ groupCounter++;
+ if (groupCounter > 1) {
+ sbgroups.append(" OR ");
+ }
+ sbgroups.append(IndexServiceBean.getGroupPrefix() + groupAlias);
+ }
+ }
+
+ if (groupCounter > 1) {
+ // If there is more than one group, the parentheses must be added:
+ sbgroups.insert(0, "(");
+ sbgroups.append(")");
+ }
+
+ if (groupCounter > 0) {
+ // If there are any groups for this user, an extra join must be
+ // added to the query, and the extra sub-query must be added to
+ // the combined Solr query:
+ sb.append(" OR {!join from=" + SearchFields.DEFINITION_POINT + " to=id v=$q1}");
+ // Add the subquery to the combined Solr query:
+ solrQuery.setParam("q1", SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString());
+ logger.info("The sub-query q1 set to " + SearchFields.DISCOVERABLE_BY + ":" + sbgroups.toString());
+ }
+
+ String ret = sb.toString();
+ logger.info("Returning experimental query: " + ret);
+ return ret;
+ }
+
+ // END OF EXPERIMENTAL OPTIMIZATION
+
+ // Old, un-optimized way of handling permissions.
+ // Largely left intact, minus the lookups that have already been performed
+ // above.
+
// ----------------------------------------------------
// (1) Is this a GuestUser?
- // Yes, see if GuestUser is part of any groups such as IP Groups.
// ----------------------------------------------------
if (user instanceof GuestUser) {
- String groupsFromProviders = "";
- Set groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
+
StringBuilder sb = new StringBuilder();
+
+ String groupsFromProviders = "";
for (Group group : groups) {
logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias());
String groupAlias = group.getAlias();
@@ -1025,51 +1144,11 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
return guestWithGroups;
}
- // ----------------------------------------------------
- // (2) Retrieve Authenticated User
- // ----------------------------------------------------
- if (!(user instanceof AuthenticatedUser)) {
- logger.severe("Should never reach here. A User must be an AuthenticatedUser or a Guest");
- throw new IllegalStateException("A User must be an AuthenticatedUser or a Guest");
- }
-
- AuthenticatedUser au = (AuthenticatedUser) user;
-
- // if (addFacets) {
- // // Logged in user, has publication status facet
- // //
- // solrQuery.addFacetField(SearchFields.PUBLICATION_STATUS);
- // }
-
- // ----------------------------------------------------
- // (3) Is this a Super User?
- // Yes, give back everything
- // ----------------------------------------------------
- if (au.isSuperuser()) {
- // Somewhat dangerous because this user (a superuser) will be able
- // to see everything in Solr with no regard to permissions. But it's
- // been this way since Dataverse 4.0. So relax. :)
-
- return dangerZoneNoSolrJoin;
- }
-
- // ----------------------------------------------------
- // (4) User is logged in AND onlyDatatRelatedToMe == true
- // Yes, give back everything -> the settings will be in
- // the filterqueries given to search
- // ----------------------------------------------------
- if (onlyDatatRelatedToMe == true) {
- if (systemConfig.myDataDoesNotUsePermissionDocs()) {
- logger.fine("old 4.2 behavior: MyData is not using Solr permission docs");
- return dangerZoneNoSolrJoin;
- } else {
- logger.fine("new post-4.2 behavior: MyData is using Solr permission docs");
- }
- }
-
// ----------------------------------------------------
// (5) Work with Authenticated User who is not a Superuser
- // ----------------------------------------------------
+ // ----------------------------------------------------
+ // It was already confirmed, that if the user is not GuestUser, we
+ // have an AuthenticatedUser au which is not null.
/**
* @todo all this code needs cleanup and clarification.
*/
@@ -1100,7 +1179,6 @@ private String getPermissionFilterQuery(DataverseRequest dataverseRequest, SolrQ
* a given "content document" (dataset version, etc) in Solr.
*/
String groupsFromProviders = "";
- Set groups = groupService.collectAncestors(groupService.groupsFor(dataverseRequest));
StringBuilder sb = new StringBuilder();
for (Group group : groups) {
logger.fine("found group " + group.getIdentifier() + " with alias " + group.getAlias());
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
index 04021eb75b6..19235bb5a14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/SolrIndexServiceBean.java
@@ -356,8 +356,7 @@ private void persistToSolr(Collection docs) throws SolrServer
/**
* @todo Do something with these responses from Solr.
*/
- UpdateResponse addResponse = solrClientService.getSolrClient().add(docs);
- UpdateResponse commitResponse = solrClientService.getSolrClient().commit();
+ UpdateResponse addResponse = solrClientService.getSolrClient().add(docs, IndexServiceBean.COMMIT_WITHIN);
}
public IndexResponse indexPermissionsOnSelfAndChildren(long definitionPointId) {
@@ -497,26 +496,20 @@ public IndexResponse deleteMultipleSolrIds(List solrIdsToDelete) {
return new IndexResponse("nothing to delete");
}
try {
- solrClientService.getSolrClient().deleteById(solrIdsToDelete);
+ solrClientService.getSolrClient().deleteById(solrIdsToDelete, IndexServiceBean.COMMIT_WITHIN);
} catch (SolrServerException | IOException ex) {
/**
* @todo mark these for re-deletion
*/
return new IndexResponse("problem deleting the following documents from Solr: " + solrIdsToDelete);
}
- try {
- solrClientService.getSolrClient().commit();
- } catch (SolrServerException | IOException ex) {
- return new IndexResponse("problem committing deletion of the following documents from Solr: " + solrIdsToDelete);
- }
return new IndexResponse("no known problem deleting the following documents from Solr:" + solrIdsToDelete);
}
public JsonObjectBuilder deleteAllFromSolrAndResetIndexTimes() throws SolrServerException, IOException {
JsonObjectBuilder response = Json.createObjectBuilder();
logger.info("attempting to delete all Solr documents before a complete re-index");
- solrClientService.getSolrClient().deleteByQuery("*:*");
- solrClientService.getSolrClient().commit();
+ solrClientService.getSolrClient().deleteByQuery("*:*", IndexServiceBean.COMMIT_WITHIN);
int numRowsAffected = dvObjectService.clearAllIndexTimes();
response.add(numRowsClearedByClearAllIndexTimes, numRowsAffected);
response.add(messageString, "Solr index and database index timestamps cleared.");
diff --git a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
index afa5a1c986a..14a7ab86f22 100644
--- a/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
+++ b/src/main/java/edu/harvard/iq/dataverse/settings/FeatureFlags.java
@@ -36,6 +36,28 @@ public enum FeatureFlags {
* @since Dataverse @TODO:
*/
API_BEARER_AUTH("api-bearer-auth"),
+ /**
+ * For published (public) objects, don't use a join when searching Solr.
+ * Experimental! Requires a reindex with the following feature flag enabled,
+ * in order to add the boolean publicObject_b:true field to all the public
+ * Solr documents.
+ *
+ * @apiNote Raise flag by setting
+ * "dataverse.feature.avoid-expensive-solr-join"
+ * @since Dataverse 6.3
+ */
+ AVOID_EXPENSIVE_SOLR_JOIN("avoid-expensive-solr-join"),
+ /**
+ * With this flag enabled, the boolean field publicObject_b:true will be
+ * added to all the indexed Solr documents for publicly-available collections,
+ * datasets and files. This flag makes it possible to rely on it in searches,
+ * instead of the very expensive join (the feature flag above).
+ *
+ * @apiNote Raise flag by setting
+ * "dataverse.feature.add-publicobject-solr-field"
+ * @since Dataverse 6.3
+ */
+ ADD_PUBLICOBJECT_SOLR_FIELD("add-publicobject-solr-field"),
;
final String flag;
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 0441853eee9..2996ccb509b 100644
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -2694,6 +2694,7 @@ datasets.api.datasize.ioerror=Fatal IO error while trying to determine the total
datasets.api.grant.role.not.found.error=Cannot find role named ''{0}'' in dataverse {1}
datasets.api.grant.role.cant.create.assignment.error=Cannot create assignment: {0}
datasets.api.grant.role.assignee.not.found.error=Assignee not found
+datasets.api.grant.role.assignee.has.role.error=User already has this role for this dataset
datasets.api.revoke.role.not.found.error="Role assignment {0} not found"
datasets.api.revoke.role.success=Role {0} revoked for assignee {1} in {2}
datasets.api.privateurl.error.datasetnotfound=Could not find dataset.
@@ -3023,3 +3024,8 @@ publishDatasetCommand.pidNotReserved=Cannot publish dataset because its persiste
api.errors.invalidApiToken=Invalid API token.
api.ldninbox.citation.alert={0},
The {1} has just been notified that the {2}, {3}, cites "{6}" in this repository.
api.ldninbox.citation.subject={0}: A Dataset Citation has been reported!
+
+#OpenApi.java
+openapi.exception.invalid.format=Invalid format {0}, currently supported formats are YAML and JSON.
+openapi.exception=Supported format definition not found.
+openapi.exception.unaligned=Unaligned parameters on Headers [{0}] and Request [{1}]
diff --git a/src/main/webapp/dataset.xhtml b/src/main/webapp/dataset.xhtml
index 527b829960f..13faf9d7f20 100644
--- a/src/main/webapp/dataset.xhtml
+++ b/src/main/webapp/dataset.xhtml
@@ -86,7 +86,7 @@
-
+
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
index 5b603d88c6d..d2d14b824bd 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DatasetsIT.java
@@ -1717,6 +1717,9 @@ public void testAddRoles(){
giveRandoPermission.prettyPrint();
assertEquals(200, giveRandoPermission.getStatusCode());
+ //Asserting same role creation is covered
+ validateAssignExistingRole(datasetPersistentId,randomUsername,apiToken, "fileDownloader");
+
// Create another random user to become curator:
Response createCuratorUser = UtilIT.createRandomUser();
@@ -1853,6 +1856,14 @@ public void testListRoleAssignments() {
assertEquals(UNAUTHORIZED.getStatusCode(), notPermittedToListRoleAssignmentOnDataverse.getStatusCode());
}
+ private static void validateAssignExistingRole(String datasetPersistentId, String randomUsername, String apiToken, String role) {
+ final Response failedGrantPermission = UtilIT.grantRoleOnDataset(datasetPersistentId, role, "@" + randomUsername, apiToken);
+ failedGrantPermission.prettyPrint();
+ failedGrantPermission.then().assertThat()
+ .body("message", containsString("User already has this role for this dataset"))
+ .statusCode(FORBIDDEN.getStatusCode());
+ }
+
@Test
public void testFileChecksum() {
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
index 01f4a4646fe..b072a803aa4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java
@@ -438,7 +438,7 @@ public void testMoveDataverse() {
while (checkIndex) {
try {
try {
- Thread.sleep(4000);
+ Thread.sleep(6000);
} catch (InterruptedException ex) {
}
Response search = UtilIT.search("id:dataverse_" + dataverseId + "&subtree=" + dataverseAlias2, apiToken);
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
index 3d5691dbe03..5e436dd0e98 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/InfoIT.java
@@ -1,13 +1,12 @@
package edu.harvard.iq.dataverse.api;
import static io.restassured.RestAssured.given;
-
import io.restassured.response.Response;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
-
+import static jakarta.ws.rs.core.Response.Status.BAD_REQUEST;
import static jakarta.ws.rs.core.Response.Status.NOT_FOUND;
import static jakarta.ws.rs.core.Response.Status.OK;
import static org.hamcrest.CoreMatchers.equalTo;
@@ -82,6 +81,7 @@ public void testGetZipDownloadLimit() {
.body("data", notNullValue());
}
+
private void testSettingEndpoint(SettingsServiceBean.Key settingKey, String testSettingValue) {
String endpoint = "/api/info/settings/" + settingKey;
// Setting not found
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
index 907d3dec4bc..2f46960f9a8 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/LinkIT.java
@@ -9,6 +9,8 @@
import static jakarta.ws.rs.core.Response.Status.FORBIDDEN;
import static jakarta.ws.rs.core.Response.Status.OK;
import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -163,6 +165,8 @@ public void testDeepLinks() {
.statusCode(OK.getStatusCode())
.body("data.message", equalTo("Dataverse " + level1a + " linked successfully to " + level1b));
+ assertTrue(UtilIT.sleepForSearch("*", apiToken, "&subtree="+level1b, 1, UtilIT.GENERAL_LONG_DURATION), "Zero counts in level1b");
+
Response searchLevel1toLevel1 = UtilIT.search("*", apiToken, "&subtree=" + level1b);
searchLevel1toLevel1.prettyPrint();
searchLevel1toLevel1.then().assertThat()
@@ -184,6 +188,8 @@ public void testDeepLinks() {
.statusCode(OK.getStatusCode())
.body("data.message", equalTo("Dataverse " + level2a + " linked successfully to " + level2b));
+ assertTrue(UtilIT.sleepForSearch("*", apiToken, "&subtree=" + level2b, 1, UtilIT.GENERAL_LONG_DURATION), "Never found linked dataverse: " + level2b);
+
Response searchLevel2toLevel2 = UtilIT.search("*", apiToken, "&subtree=" + level2b);
searchLevel2toLevel2.prettyPrint();
searchLevel2toLevel2.then().assertThat()
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/OpenApiIT.java b/src/test/java/edu/harvard/iq/dataverse/api/OpenApiIT.java
new file mode 100644
index 00000000000..eb98bdcda8e
--- /dev/null
+++ b/src/test/java/edu/harvard/iq/dataverse/api/OpenApiIT.java
@@ -0,0 +1,46 @@
+package edu.harvard.iq.dataverse.api;
+
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import io.restassured.RestAssured;
+import io.restassured.response.Response;
+
+/**
+ * Integration tests for the /openapi servlet: checks the supported
+ * format / Accept-header combinations and the 400 on conflicting ones.
+ */
+public class OpenApiIT {
+
+    @BeforeAll
+    public static void setUpClass() {
+        RestAssured.baseURI = UtilIT.getRestAssuredBaseUri();
+    }
+
+    @Test
+    public void testOpenApi(){
+
+        // Accept header and format parameter both ask for JSON: 200.
+        Response openApi = UtilIT.getOpenAPI("application/json", "json");
+        openApi.prettyPrint();
+        openApi.then().assertThat()
+                .statusCode(200);
+
+        // No Accept header, explicit json format: 200.
+        openApi = UtilIT.getOpenAPI("", "json");
+        openApi.prettyPrint();
+        openApi.then().assertThat()
+                .statusCode(200);
+
+        // No Accept header, explicit yaml format: 200.
+        openApi = UtilIT.getOpenAPI("", "yaml");
+        openApi.prettyPrint();
+        openApi.then().assertThat()
+                .statusCode(200);
+
+        // Accept: application/json conflicts with format=yaml: 400.
+        openApi = UtilIT.getOpenAPI("application/json", "yaml");
+        openApi.prettyPrint();
+        openApi.then().assertThat()
+                .statusCode(400);
+    }
+}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
index e08999f59e8..6e4fd5b0bb3 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/SearchIT.java
@@ -105,7 +105,7 @@ public void testSearchPermisions() throws InterruptedException {
assertEquals(200, grantUser2AccessOnDataset.getStatusCode());
String searchPart = "id:dataset_" + datasetId1 + "_draft";
- assertTrue(UtilIT.sleepForSearch(searchPart, apiToken2, "", UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
+ assertTrue(UtilIT.sleepForSearch(searchPart, apiToken2, "", 1, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
Response shouldBeVisibleToUser2 = UtilIT.search("id:dataset_" + datasetId1 + "_draft", apiToken2);
shouldBeVisibleToUser2.prettyPrint();
@@ -793,14 +793,9 @@ public void testNestedSubtree() {
Response createDataverseResponse2 = UtilIT.createSubDataverse("subDV" + UtilIT.getRandomIdentifier(), null, apiToken, dataverseAlias);
createDataverseResponse2.prettyPrint();
String dataverseAlias2 = UtilIT.getAliasFromResponse(createDataverseResponse2);
-
+
String searchPart = "*";
-
- Response searchUnpublishedSubtree = UtilIT.search(searchPart, apiToken, "&subtree="+dataverseAlias);
- searchUnpublishedSubtree.prettyPrint();
- searchUnpublishedSubtree.then().assertThat()
- .statusCode(OK.getStatusCode())
- .body("data.total_count", CoreMatchers.equalTo(1));
+ assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree=" + dataverseAlias, 1, UtilIT.GENERAL_LONG_DURATION), "Missing subDV");
Response searchUnpublishedSubtree2 = UtilIT.search(searchPart, apiToken, "&subtree="+dataverseAlias2);
searchUnpublishedSubtree2.prettyPrint();
@@ -863,18 +858,8 @@ public void testNestedSubtree() {
publishDataset.then().assertThat()
.statusCode(OK.getStatusCode());
UtilIT.sleepForReindex(datasetPid, apiToken, 5);
- Response searchPublishedSubtreeWDS = UtilIT.search(searchPart, apiToken, "&subtree="+dataverseAlias);
- searchPublishedSubtreeWDS.prettyPrint();
- searchPublishedSubtreeWDS.then().assertThat()
- .statusCode(OK.getStatusCode())
- .body("data.total_count", CoreMatchers.equalTo(2));
-
- Response searchPublishedSubtreeWDS2 = UtilIT.search(searchPart, apiToken, "&subtree="+dataverseAlias2);
- searchPublishedSubtreeWDS2.prettyPrint();
- searchPublishedSubtreeWDS2.then().assertThat()
- .statusCode(OK.getStatusCode())
- .body("data.total_count", CoreMatchers.equalTo(1));
-
+ assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree=" + dataverseAlias, 2, UtilIT.GENERAL_LONG_DURATION), "Did not find 2 children");
+ assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree=" + dataverseAlias2, 1, UtilIT.GENERAL_LONG_DURATION), "Did not find 1 child");
}
//If this test fails it'll fail inconsistently as it tests underlying async role code
@@ -906,16 +891,16 @@ public void testCuratorCardDataversePopulation() throws InterruptedException {
String subDataverseAlias = "dv" + UtilIT.getRandomIdentifier();
Response createSubDataverseResponse = UtilIT.createSubDataverse(subDataverseAlias, null, apiTokenSuper, parentDataverseAlias);
createSubDataverseResponse.prettyPrint();
- //UtilIT.getAliasFromResponse(createSubDataverseResponse);
-
+
Response grantRoleOnDataverseResponse = UtilIT.grantRoleOnDataverse(subDataverseAlias, "curator", "@" + username, apiTokenSuper);
grantRoleOnDataverseResponse.then().assertThat()
.statusCode(OK.getStatusCode());
-
+
String searchPart = "*";
+ assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, 1, UtilIT.GENERAL_LONG_DURATION), "Failed test if search exceeds max duration " + searchPart);
+
Response searchPublishedSubtreeSuper = UtilIT.search(searchPart, apiTokenSuper, "&subtree="+parentDataverseAlias);
- assertTrue(UtilIT.sleepForSearch(searchPart, apiToken, "&subtree="+parentDataverseAlias, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Failed test if search exceeds max duration " + searchPart);
searchPublishedSubtreeSuper.prettyPrint();
searchPublishedSubtreeSuper.then().assertThat()
.statusCode(OK.getStatusCode())
@@ -968,7 +953,7 @@ public void testSubtreePermissions() {
.statusCode(OK.getStatusCode());
// Wait a little while for the index to pick up the datasets, otherwise timing issue with searching for it.
- UtilIT.sleepForReindex(datasetId2.toString(), apiToken, 2);
+ UtilIT.sleepForReindex(datasetId2.toString(), apiToken, 3);
String identifier = JsonPath.from(datasetAsJson.getBody().asString()).getString("data.identifier");
String identifier2 = JsonPath.from(datasetAsJson2.getBody().asString()).getString("data.identifier");
@@ -1077,6 +1062,8 @@ public void testSubtreePermissions() {
.statusCode(OK.getStatusCode())
.body("data.total_count", CoreMatchers.equalTo(2));
+ assertTrue(UtilIT.sleepForSearch(searchPart, null, "&subtree=" + dataverseAlias2, 1, UtilIT.MAXIMUM_INGEST_LOCK_DURATION), "Missing dataset w/no apiKey");
+
Response searchPublishedSubtreesNoAPI = UtilIT.search(searchPart, null, "&subtree="+dataverseAlias+"&subtree="+dataverseAlias2);
searchPublishedSubtreesNoAPI.prettyPrint();
searchPublishedSubtreesNoAPI.then().assertThat()
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
index 5880b08e5c2..0189ffd6e58 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UsersIT.java
@@ -8,6 +8,7 @@
import edu.harvard.iq.dataverse.authorization.DataverseRole;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import jakarta.json.Json;
@@ -206,15 +207,13 @@ public void testMergeAccounts(){
String aliasInOwner = "groupFor" + dataverseAlias;
String displayName = "Group for " + dataverseAlias;
String user2identifier = "@" + usernameConsumed;
+ String target2identifier = "@" + targetname;
Response createGroup = UtilIT.createGroup(dataverseAlias, aliasInOwner, displayName, superuserApiToken);
createGroup.prettyPrint();
createGroup.then().assertThat()
.statusCode(CREATED.getStatusCode());
- String groupIdentifier = JsonPath.from(createGroup.asString()).getString("data.identifier");
-
- List roleAssigneesToAdd = new ArrayList<>();
- roleAssigneesToAdd.add(user2identifier);
+ List roleAssigneesToAdd = Arrays.asList(user2identifier, target2identifier);
Response addToGroup = UtilIT.addToGroup(dataverseAlias, aliasInOwner, roleAssigneesToAdd, superuserApiToken);
addToGroup.prettyPrint();
addToGroup.then().assertThat()
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
index 507c9b302b3..c107ea97b51 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java
@@ -11,6 +11,7 @@
import jakarta.json.JsonObjectBuilder;
import jakarta.json.JsonArrayBuilder;
import jakarta.json.JsonObject;
+
import static jakarta.ws.rs.core.Response.Status.CREATED;
import java.nio.charset.StandardCharsets;
@@ -63,6 +64,7 @@ public class UtilIT {
private static final String EMPTY_STRING = "";
public static final int MAXIMUM_INGEST_LOCK_DURATION = 15;
public static final int MAXIMUM_PUBLISH_LOCK_DURATION = 20;
+ public static final int GENERAL_LONG_DURATION = 45; //Useful when multiple adds/publishes, etc. all get done in sequence
public static final int MAXIMUM_IMPORT_DURATION = 1;
private static SwordConfigurationImpl swordConfiguration = new SwordConfigurationImpl();
@@ -2844,6 +2846,13 @@ static boolean sleepForReindex(String idOrPersistentId, String apiToken, int dur
i = repeats + 1;
}
} while ((i <= repeats) && stale);
+ try {
+ Thread.sleep(1000); //Current autoSoftIndexTime - which adds a delay to when the new docs are visible
+ i++;
+ } catch (InterruptedException ex) {
+ Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex);
+ i = repeats + 1;
+ }
System.out.println("Waited " + (i * (sleepStep / 1000.0)) + " seconds");
return i <= repeats;
@@ -2899,10 +2908,15 @@ static Boolean sleepForDeadlock(int duration) {
//Helper function that returns true if a given search returns a non-zero response within a fixed time limit
// a given duration returns false if still zero results after given duration
- static Boolean sleepForSearch(String searchPart, String apiToken, String subTree, int duration) {
+ static Boolean sleepForSearch(String searchPart, String apiToken, String subTree, int count, int duration) {
Response searchResponse = UtilIT.search(searchPart, apiToken, subTree);
+ //Leave early if search isn't working
+ if(searchResponse.statusCode()!=200) {
+ logger.warning("Non-200 status in sleepForSearch: " + searchResponse.statusCode());
+ return false;
+ }
int i = 0;
do {
try {
@@ -2915,8 +2929,8 @@ static Boolean sleepForSearch(String searchPart, String apiToken, String subTre
} catch (InterruptedException ex) {
Logger.getLogger(UtilIT.class.getName()).log(Level.SEVERE, null, ex);
}
- } while (UtilIT.getSearchCountFromResponse(searchResponse) == 0);
-
+ } while (UtilIT.getSearchCountFromResponse(searchResponse) != count);
+ logger.info("Waited " + i + " seconds in sleepForSearch");
return i <= duration;
}
@@ -3954,4 +3968,12 @@ static Response updateDataverseInputLevels(String dataverseAlias, String[] input
.contentType(ContentType.JSON)
.put("/api/dataverses/" + dataverseAlias + "/inputLevels");
}
+
+ public static Response getOpenAPI(String accept, String format) {
+ Response response = given()
+ .header("Accept", accept)
+ .queryParam("format", format)
+ .get("/openapi");
+ return response;
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
index 33f9acd0e1a..508eac46cb4 100644
--- a/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/engine/command/impl/CreatePrivateUrlCommandTest.java
@@ -2,24 +2,32 @@
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetVersion;
+import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseRoleServiceBean;
+import edu.harvard.iq.dataverse.DvObject;
import edu.harvard.iq.dataverse.RoleAssignment;
import edu.harvard.iq.dataverse.authorization.DataverseRole;
+import edu.harvard.iq.dataverse.authorization.RoleAssignee;
import edu.harvard.iq.dataverse.authorization.users.PrivateUrlUser;
import edu.harvard.iq.dataverse.engine.TestCommandContext;
import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.privateurl.PrivateUrl;
import edu.harvard.iq.dataverse.privateurl.PrivateUrlServiceBean;
+import edu.harvard.iq.dataverse.search.IndexResponse;
+import edu.harvard.iq.dataverse.search.IndexServiceBean;
+import edu.harvard.iq.dataverse.search.SolrIndexServiceBean;
import edu.harvard.iq.dataverse.util.SystemConfig;
import java.util.ArrayList;
import java.util.List;
+import java.util.concurrent.Future;
+
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.*;
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CreatePrivateUrlCommandTest {
@@ -73,6 +81,10 @@ public RoleAssignment save(RoleAssignment assignment) {
// no-op
return assignment;
}
+ @Override
+ public List directRoleAssignments(RoleAssignee roas, DvObject dvo) {
+ return List.of();
+ }
};
}
@@ -89,6 +101,16 @@ public String getDataverseSiteUrl() {
};
}
+
+ @Override
+ public SolrIndexServiceBean solrIndex() {
+ return new SolrIndexServiceBean(){
+ @Override
+ public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) {
+ return null;
+ }
+ };
+ }
}
);
diff --git a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java
index 92fd6731e93..89f04e0cd5a 100644
--- a/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/util/cache/CacheFactoryBeanTest.java
@@ -18,6 +18,7 @@
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.parallel.ResourceLock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
@@ -154,6 +155,7 @@ public void testAdminUserExemptFromGettingRateLimited() {
@Test
@Tag(Tags.NOT_ESSENTIAL_UNITTESTS)
+ @ResourceLock(value = "cache")
public void testAuthenticatedUserGettingRateLimited() throws InterruptedException {
Command action = new ListFacetsCommand(null,null);
authUser.setRateLimitTier(2); // 120 cals per hour - 1 added token every 30 seconds
diff --git a/tests/integration-tests.txt b/tests/integration-tests.txt
index 58d8d814bb9..44bbfdcceb7 100644
--- a/tests/integration-tests.txt
+++ b/tests/integration-tests.txt
@@ -1 +1 @@
-DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT
+DataversesIT,DatasetsIT,SwordIT,AdminIT,BuiltinUsersIT,UsersIT,UtilIT,ConfirmEmailIT,FileMetadataIT,FilesIT,SearchIT,InReviewWorkflowIT,HarvestingServerIT,HarvestingClientsIT,MoveIT,MakeDataCountApiIT,FileTypeDetectionIT,EditDDIIT,ExternalToolsIT,AccessIT,DuplicateFilesIT,DownloadFilesIT,LinkIT,DeleteUsersIT,DeactivateUsersIT,AuxiliaryFilesIT,InvalidCharactersIT,LicensesIT,NotificationsIT,BagIT,MetadataBlocksIT,NetcdfIT,SignpostingIT,FitsIT,LogoutIT,DataRetrieverApiIT,ProvIT,S3AccessIT,OpenApiIT,InfoIT