From 7f5b0bea1670b5c2ec84651b45820211b9df2988 Mon Sep 17 00:00:00 2001 From: GPortas Date: Tue, 15 Oct 2024 13:34:34 +0100 Subject: [PATCH 01/16] Added: updateDataverse endpoint with addDataverse refactoring --- .../harvard/iq/dataverse/api/Dataverses.java | 160 ++++++++++---- .../command/impl/CreateDataverseCommand.java | 6 +- .../command/impl/UpdateDataverseCommand.java | 204 ++++++++++-------- 3 files changed, 231 insertions(+), 139 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0ee146ed99b..b85ee0afc8f 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -127,73 +127,145 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { @Path("{identifier}") public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; - JsonObject newDataverseJson; try { - newDataverseJson = JsonUtil.getJsonObject(body); - newDataverse = jsonParser().parseDataverse(newDataverseJson); + newDataverse = parseAndValidateDataverse(body); } catch (JsonParsingException jpe) { - logger.log(Level.SEVERE, "Json: {0}", body); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); } try { - JsonObject metadataBlocksJson = newDataverseJson.getJsonObject("metadataBlocks"); - List inputLevels = null; - List metadataBlocks = null; - List facetList = null; - if (metadataBlocksJson != null) { - JsonArray inputLevelsArray = 
metadataBlocksJson.getJsonArray("inputLevels"); - inputLevels = inputLevelsArray != null ? parseInputLevels(inputLevelsArray, newDataverse) : null; - - JsonArray metadataBlockNamesArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); - metadataBlocks = metadataBlockNamesArray != null ? parseNewDataverseMetadataBlocks(metadataBlockNamesArray) : null; - - JsonArray facetIdsArray = metadataBlocksJson.getJsonArray("facetIds"); - facetList = facetIdsArray != null ? parseFacets(facetIdsArray) : null; - } + List inputLevels = parseInputLevels(body, newDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); if (!parentIdtf.isEmpty()) { Dataverse owner = findDataverseOrDie(parentIdtf); newDataverse.setOwner(owner); } - // set the dataverse - contact relationship in the contacts - for (DataverseContact dc : newDataverse.getDataverseContacts()) { - dc.setDataverse(newDataverse); - } - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facetList, inputLevels, metadataBlocks)); + newDataverse = execCommand(new CreateDataverseCommand(newDataverse, createDataverseRequest(u), facets, inputLevels, metadataBlocks)); return created("/dataverses/" + newDataverse.getAlias(), json(newDataverse)); - } catch (WrappedResponse ww) { - - String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); - if (!error.isEmpty()) { - logger.log(Level.INFO, error); - return ww.refineResponse(error); - } - return ww.getResponse(); + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); } catch (EJBException ex) { - Throwable cause = ex; - StringBuilder sb = new StringBuilder(); - sb.append("Error creating dataverse."); - while (cause.getCause() != null) { - cause = cause.getCause(); - if (cause instanceof ConstraintViolationException) { - 
sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); - } - } - logger.log(Level.SEVERE, sb.toString()); - return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + sb.toString()); + return handleEJBException(ex, "Error creating dataverse."); } catch (Exception ex) { logger.log(Level.SEVERE, "Error creating dataverse", ex); return error(Response.Status.INTERNAL_SERVER_ERROR, "Error creating dataverse: " + ex.getMessage()); + } + } + + @PUT + @AuthRequired + @Path("{identifier}") + public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { + Dataverse originalDataverse; + try { + originalDataverse = findDataverseOrDie(identifier); + } catch (WrappedResponse e) { + return e.getResponse(); + } + + Dataverse updatedDataverse; + try { + updatedDataverse = parseAndValidateDataverse(body); + } catch (JsonParsingException jpe) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); + } catch (JsonParseException ex) { + return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparsetodataverse"), ex.getMessage())); + } + + try { + List inputLevels = parseInputLevels(body, originalDataverse); + List metadataBlocks = parseMetadataBlocks(body); + List facets = parseFacets(body); + + updatedDataverse.setId(originalDataverse.getId()); + AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); + updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); + return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + + } catch (WrappedResponse ww) { + return handleWrappedResponse(ww); + } catch (EJBException ex) { + return handleEJBException(ex, "Error updating dataverse."); + } catch (Exception ex) { 
+ logger.log(Level.SEVERE, "Error updating dataverse", ex); + return error(Response.Status.INTERNAL_SERVER_ERROR, "Error updating dataverse: " + ex.getMessage()); + } + } + + private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(dataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; + } + } + + private List parseInputLevels(String body, Dataverse dataverse) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray inputLevelsArray = metadataBlocksJson.getJsonArray("inputLevels"); + return inputLevelsArray != null ? parseInputLevels(inputLevelsArray, dataverse) : null; + } + + private List parseMetadataBlocks(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray metadataBlocksArray = metadataBlocksJson.getJsonArray("metadataBlockNames"); + return metadataBlocksArray != null ? parseNewDataverseMetadataBlocks(metadataBlocksArray) : null; + } + + private List parseFacets(String body) throws WrappedResponse { + JsonObject metadataBlocksJson = getMetadataBlocksJson(body); + if (metadataBlocksJson == null) { + return null; + } + JsonArray facetsArray = metadataBlocksJson.getJsonArray("facetIds"); + return facetsArray != null ? 
parseFacets(facetsArray) : null; + } + + private JsonObject getMetadataBlocksJson(String body) { + JsonObject dataverseJson = JsonUtil.getJsonObject(body); + return dataverseJson.getJsonObject("metadataBlocks"); + } + + private Response handleWrappedResponse(WrappedResponse ww) { + String error = ConstraintViolationUtil.getErrorStringForConstraintViolations(ww.getCause()); + if (!error.isEmpty()) { + logger.log(Level.INFO, error); + return ww.refineResponse(error); + } + return ww.getResponse(); + } + + private Response handleEJBException(EJBException ex, String action) { + Throwable cause = ex; + StringBuilder sb = new StringBuilder(); + sb.append(action); + while (cause.getCause() != null) { + cause = cause.getCause(); + if (cause instanceof ConstraintViolationException) { + sb.append(ConstraintViolationUtil.getErrorStringForConstraintViolations(cause)); + } } + logger.log(Level.SEVERE, sb.toString()); + return error(Response.Status.INTERNAL_SERVER_ERROR, sb.toString()); } private List parseNewDataverseMetadataBlocks(JsonArray metadataBlockNamesArray) throws WrappedResponse { diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 489b36e7cef..2ce16a86297 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -67,7 +67,6 @@ public CreateDataverseCommand(Dataverse created, @Override public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { @@ -75,6 +74,10 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } + for (DataverseContact dc : created.getDataverseContacts()) { + dc.setDataverse(created); + } + if (metadataBlocks != null && 
!metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); @@ -194,5 +197,4 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { public boolean onSuccess(CommandContext ctxt, Object r) { return ctxt.dataverses().index((Dataverse) r); } - } diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index bdb69dc918f..b1670a264bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -1,13 +1,11 @@ package edu.harvard.iq.dataverse.engine.command.impl; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetFieldType; -import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; -import edu.harvard.iq.dataverse.DataverseFieldTypeInputLevel; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; + import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; @@ -21,121 +19,141 @@ /** * Update an existing dataverse. 
+ * * @author michael */ -@RequiredPermissions( Permission.EditDataverse ) +@RequiredPermissions(Permission.EditDataverse) public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; + private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); + + private final Dataverse editedDv; + private final List facetList; private final List featuredDataverseList; private final List inputLevelList; + private final List metadataBlocks; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, List facetList, List featuredDataverseList, - DataverseRequest aRequest, List inputLevelList ) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null){ - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null){ - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); - } else { - this.featuredDataverseList = null; - } - if (inputLevelList != null){ - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - } - - @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - // Perform any optional validation steps, if defined: - if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { - // For admins, an override of the external validation step may be enabled: - if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { - String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, 
executable, getRequest()); - - if (!result) { - String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); - throw new IllegalCommandException(rejectionMessage, this); - } + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList) { + this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + } + + public UpdateDataverseCommand(Dataverse editedDv, + List facetList, + List featuredDataverseList, + DataverseRequest aRequest, + List inputLevelList, + List metadataBlocks) { + super(aRequest, editedDv); + this.editedDv = editedDv; + // add update template uses this command but does not + // update facet list or featured dataverses + if (facetList != null) { + this.facetList = new ArrayList<>(facetList); + } else { + this.facetList = null; + } + if (featuredDataverseList != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + } else { + this.featuredDataverseList = null; + } + if (inputLevelList != null) { + this.inputLevelList = new ArrayList<>(inputLevelList); + } else { + this.inputLevelList = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + logger.fine("Entering update dataverse command"); + + // Perform any optional validation steps, if defined: + if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { + // For admins, an override of the external validation step may be enabled: + if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { + String executable = ctxt.systemConfig().getDataverseValidationExecutable(); + boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + + if (!result) { + String rejectionMessage = 
ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); + throw new IllegalCommandException(rejectionMessage, this); } } - - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); - - DataverseType oldDvType = oldDv.getDataverseType(); - String oldDvAlias = oldDv.getAlias(); - String oldDvName = oldDv.getName(); - oldDv = null; - - Dataverse result = ctxt.dataverses().save(editedDv); - - if ( facetList != null ) { - ctxt.facets().deleteFacetsFor(result); - int i=0; - for ( DatasetFieldType df : facetList ) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + } + + for (DataverseContact dc : editedDv.getDataverseContacts()) { + dc.setDataverse(editedDv); + } + + Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + + DataverseType oldDvType = oldDv.getDataverseType(); + String oldDvAlias = oldDv.getAlias(); + String oldDvName = oldDv.getName(); + + Dataverse result = ctxt.dataverses().save(editedDv); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(result); + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df.getId(), result.getId()); } - if ( featuredDataverseList != null ) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); - int i=0; - for ( Object obj : featuredDataverseList ) { - Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); - } + } + if (featuredDataverseList != null) { + ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + int i = 0; + for (Object obj : featuredDataverseList) { + Dataverse dv = (Dataverse) obj; + ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); } - if ( inputLevelList != null ) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for ( DataverseFieldTypeInputLevel obj : inputLevelList ) { - ctxt.fieldTypeInputLevels().create(obj); - } + } + if (inputLevelList != null) { + ctxt.fieldTypeInputLevels().deleteFacetsFor(result); + for (DataverseFieldTypeInputLevel obj 
: inputLevelList) { + ctxt.fieldTypeInputLevels().create(obj); } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) + } + + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) || !oldDvName.equals(editedDv.getName()) || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; - } - - return result; - } - + datasetsReindexRequired = true; + } + + return result; + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { - + // first kick of async index of datasets // TODO: is this actually needed? Is there a better way to handle // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set - // of fields have been changed (since these values are included in the + // of fields have been changed (since these values are included in the // indexed solr documents for dataasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; - + if (datasetsReindexRequired) { List datasets = ctxt.datasets().findByOwnerId(result.getId()); ctxt.index().asyncIndexDatasetList(datasets, true); } - - return ctxt.dataverses().index((Dataverse) r); - } + return ctxt.dataverses().index((Dataverse) r); + } } - From 19c8a12b32a502ee43f46916248a7d4691928aa6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:30:00 +0100 Subject: [PATCH 02/16] Changed: limiting the information to update in a dataverse through the new update endpoint --- .../harvard/iq/dataverse/api/Dataverses.java | 8 +- .../iq/dataverse/util/json/JsonParser.java | 79 ++++++++++++++----- 2 files changed, 62 insertions(+), 25 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index b85ee0afc8f..0bc389041c2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -128,7 +128,7 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; try { - newDataverse = parseAndValidateDataverse(body); + newDataverse = parseAndValidateDataverseRequestBody(body, null); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -172,7 +172,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod Dataverse updatedDataverse; try { - updatedDataverse = parseAndValidateDataverse(body); + updatedDataverse = parseAndValidateDataverseRequestBody(body, originalDataverse); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, 
MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -200,10 +200,10 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } } - private Dataverse parseAndValidateDataverse(String body) throws JsonParsingException, JsonParseException { + private Dataverse parseAndValidateDataverseRequestBody(String body, Dataverse dataverseToUpdate) throws JsonParsingException, JsonParseException { try { JsonObject dataverseJson = JsonUtil.getJsonObject(body); - return jsonParser().parseDataverse(dataverseJson); + return dataverseToUpdate != null ? jsonParser().parseDataverseUpdates(dataverseJson, dataverseToUpdate) : jsonParser().parseDataverse(dataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); throw jpe; diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index 2f01c9bc2f2..f63e4c4fd9c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -50,6 +50,7 @@ import java.util.Set; import java.util.logging.Logger; import java.util.stream.Collectors; + import jakarta.json.Json; import jakarta.json.JsonArray; import jakarta.json.JsonObject; @@ -128,19 +129,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setPermissionRoot(jobj.getBoolean("permissionRoot", false)); dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - - if (jobj.containsKey("dataverseContacts")) { - JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); - int i = 0; - List dvContactList = new LinkedList<>(); - for (JsonValue jsv : dvContacts) { - DataverseContact dvc = new DataverseContact(dv); - dvc.setContactEmail(getMandatoryString((JsonObject) jsv, 
"contactEmail")); - dvc.setDisplayOrder(i++); - dvContactList.add(dvc); - } - dv.setDataverseContacts(dvContactList); - } + + updateDataverseContacts(dv, jobj); if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); @@ -149,14 +139,8 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - if (jobj.containsKey("dataverseType")) { - for (Dataverse.DataverseType dvtype : Dataverse.DataverseType.values()) { - if (dvtype.name().equals(jobj.getString("dataverseType"))) { - dv.setDataverseType(dvtype); - } - } - } - + updateDataverseType(dv, jobj); + if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); } @@ -189,6 +173,59 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { return dv; } + + public Dataverse parseDataverseUpdates(JsonObject jsonObject, Dataverse dataverseToUpdate) throws JsonParseException { + String alias = jsonObject.getString("alias", null); + if (alias != null) { + dataverseToUpdate.setAlias(alias); + } + + String name = jsonObject.getString("name", null); + if (name != null) { + dataverseToUpdate.setName(name); + } + + String description = jsonObject.getString("description", null); + if (description != null) { + dataverseToUpdate.setDescription(description); + } + + String affiliation = jsonObject.getString("affiliation", null); + if (affiliation != null) { + dataverseToUpdate.setAffiliation(affiliation); + } + + updateDataverseType(dataverseToUpdate, jsonObject); + + updateDataverseContacts(dataverseToUpdate, jsonObject); + + return dataverseToUpdate; + } + + private void updateDataverseType(Dataverse dataverse, JsonObject jsonObject) { + String receivedDataverseType = jsonObject.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> 
type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dataverse::setDataverseType); + } + } + + private void updateDataverseContacts(Dataverse dataverse, JsonObject jsonObject) throws JsonParseException { + if (jsonObject.containsKey("dataverseContacts")) { + JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); + int i = 0; + List dvContactList = new LinkedList<>(); + for (JsonValue jsv : dvContacts) { + DataverseContact dvc = new DataverseContact(dataverse); + dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); + dvc.setDisplayOrder(i++); + dvContactList.add(dvc); + } + dataverse.setDataverseContacts(dvContactList); + } + } public DataverseTheme parseDataverseTheme(JsonObject obj) { From f4c3d2c9d9991edd2d02bd760d1fec86547a519c Mon Sep 17 00:00:00 2001 From: GPortas Date: Wed, 16 Oct 2024 16:43:12 +0100 Subject: [PATCH 03/16] Removed: DataverseContact host dataverse re-set --- .../dataverse/engine/command/impl/CreateDataverseCommand.java | 4 ---- .../dataverse/engine/command/impl/UpdateDataverseCommand.java | 4 ---- 2 files changed, 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 2ce16a86297..6957dac416d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -74,10 +74,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : created.getDataverseContacts()) { - dc.setDataverse(created); - } - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { created.setMetadataBlockRoot(true); created.setMetadataBlocks(metadataBlocks); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java 
b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index b1670a264bf..551f0ffdff7 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -92,10 +92,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - for (DataverseContact dc : editedDv.getDataverseContacts()) { - dc.setDataverse(editedDv); - } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); DataverseType oldDvType = oldDv.getDataverseType(); From 8ef8cfd2c70d34f458d5bad33d7b790c3150b409 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 13:35:44 +0100 Subject: [PATCH 04/16] Added: parseDataverseUpdates unit test --- .../dataverse/util/json/JsonParserTest.java | 54 ++++++++++++------- 1 file changed, 35 insertions(+), 19 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 59e175f30c1..1a1d836f6a0 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -4,17 +4,9 @@ package edu.harvard.iq.dataverse.util.json; -import edu.harvard.iq.dataverse.ControlledVocabularyValue; -import edu.harvard.iq.dataverse.Dataset; -import edu.harvard.iq.dataverse.DatasetField; -import edu.harvard.iq.dataverse.DatasetFieldCompoundValue; -import edu.harvard.iq.dataverse.DatasetFieldType; +import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; -import edu.harvard.iq.dataverse.DatasetFieldValue; -import edu.harvard.iq.dataverse.DatasetVersion; -import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; -import edu.harvard.iq.dataverse.FileMetadata; import edu.harvard.iq.dataverse.UserNotification.Type; import 
edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; @@ -50,16 +42,7 @@ import java.io.StringReader; import java.math.BigDecimal; import java.text.ParseException; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Collections; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; -import java.util.Set; -import java.util.TimeZone; +import java.util.*; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.*; @@ -281,6 +264,39 @@ public void testParseCompleteDataverse() throws JsonParseException { throw new JsonParseException("Couldn't read test file", ioe); } } + + /** + * TODO + * @throws JsonParseException when this test is broken. + */ + @Test + public void parseDataverseUpdates() throws JsonParseException { + Dataverse dataverse = new Dataverse(); + dataverse.setName("Name to update"); + dataverse.setAlias("aliasToUpdate"); + dataverse.setAffiliation("Affiliation to update"); + dataverse.setDescription("Description to update"); + dataverse.setDataverseType(Dataverse.DataverseType.DEPARTMENT); + List originalContacts = new ArrayList<>(); + originalContacts.add(new DataverseContact(dataverse, "updatethis@example.edu")); + dataverse.setDataverseContacts(originalContacts); + JsonObject dvJson; + try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { + dvJson = Json.createReader(reader).readObject(); + Dataverse actual = sut.parseDataverseUpdates(dvJson, dataverse); + assertEquals("Scientific Research", actual.getName()); + assertEquals("science", actual.getAlias()); + assertEquals("Scientific Research University", actual.getAffiliation()); + assertEquals("We do all the science.", actual.getDescription()); + assertEquals("LABORATORY", 
actual.getDataverseType().toString()); + assertEquals(2, actual.getDataverseContacts().size()); + assertEquals("pi@example.edu,student@example.edu", actual.getContactEmails()); + assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); + assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); + } catch (IOException ioe) { + throw new JsonParseException("Couldn't read test file", ioe); + } + } @Test public void testParseThemeDataverse() throws JsonParseException { From 62df2a7d534b87cd8975fd01317d3d1a05576e4a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:36:27 +0100 Subject: [PATCH 05/16] Changed: reordered logic in UpdateDataverseCommand for further refactoring --- .../command/impl/UpdateDataverseCommand.java | 56 +++++++++++-------- 1 file changed, 34 insertions(+), 22 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 551f0ffdff7..16b93debb6d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -78,6 +78,11 @@ public UpdateDataverseCommand(Dataverse editedDv, public Dataverse execute(CommandContext ctxt) throws CommandException { logger.fine("Entering update dataverse command"); + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + editedDv.setMetadataBlockRoot(true); + editedDv.setMetadataBlocks(metadataBlocks); + } + // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: @@ -98,39 +103,46 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { String oldDvAlias = oldDv.getAlias(); String oldDvName = oldDv.getName(); - Dataverse result = 
ctxt.dataverses().save(editedDv); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(result); - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df.getId(), result.getId()); - } + // We don't want to reindex the children datasets unnecessarily: + // When these values are changed we need to reindex all children datasets + // This check is not recursive as all the values just report the immediate parent + if (!oldDvType.equals(editedDv.getDataverseType()) + || !oldDvName.equals(editedDv.getName()) + || !oldDvAlias.equals(editedDv.getAlias())) { + datasetsReindexRequired = true; } + if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(result); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), result.getId()); + ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); } } - if (inputLevelList != null) { - ctxt.fieldTypeInputLevels().deleteFacetsFor(result); - for (DataverseFieldTypeInputLevel obj : inputLevelList) { - ctxt.fieldTypeInputLevels().create(obj); + + if (facetList != null) { + ctxt.facets().deleteFacetsFor(editedDv); + if (!facetList.isEmpty()) { + editedDv.setFacetRoot(true); + } + int i = 0; + for (DatasetFieldType df : facetList) { + ctxt.facets().create(i++, df, editedDv); } } - - // We don't want to reindex the children datasets unnecessarily: - // When these values are changed we need to reindex all children datasets - // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { - datasetsReindexRequired = true; + if (inputLevelList != null) { + if (!inputLevelList.isEmpty()) { + editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); 
+ } + ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); + for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { + inputLevel.setDataverse(editedDv); + ctxt.fieldTypeInputLevels().create(inputLevel); + } } + Dataverse result = ctxt.dataverses().save(editedDv); return result; } From 6ccbb4ae53ee6bdd573b686e98c964ecf4e8d2db Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:05 +0100 Subject: [PATCH 06/16] Changed: updateDataverse return code --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 0bc389041c2..d8bd2b8cb4b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -188,7 +188,7 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); - return created("/dataverses/" + updatedDataverse.getAlias(), json(updatedDataverse)); + return ok(json(updatedDataverse)); } catch (WrappedResponse ww) { return handleWrappedResponse(ww); From 5c1703906dfeb205604dd8608b286ee706295e2d Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:37:47 +0100 Subject: [PATCH 07/16] Added: IT for updateDataverse endpoint --- .../iq/dataverse/api/DataversesIT.java | 43 +++++++++++++ .../edu/harvard/iq/dataverse/api/UtilIT.java | 62 +++++++++++++++++++ 2 files changed, 105 insertions(+) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 8c6a8244af1..7abc35d536a 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ 
b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1253,6 +1253,49 @@ public void testAddDataverse() { .body("message", equalTo("Invalid metadata block name: \"" + invalidMetadataBlockName + "\"")); } + @Test + public void testUpdateDataverse() { + Response createUser = UtilIT.createRandomUser(); + String apiToken = UtilIT.getApiTokenFromResponse(createUser); + String testAliasSuffix = "-update-dataverse"; + + String testDataverseAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + Response createSubDataverseResponse = UtilIT.createSubDataverse(testDataverseAlias, null, apiToken, "root"); + createSubDataverseResponse.then().assertThat().statusCode(CREATED.getStatusCode()); + + String newAlias = UtilIT.getRandomDvAlias() + testAliasSuffix; + String newName = "New Test Dataverse Name"; + String newAffiliation = "New Test Dataverse Affiliation"; + String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); + String[] newContactEmails = new String[] {"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[] {"geographicCoverage"}; + String[] newFacetIds = new String[] {"contributorName"}; + String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + + Response updateDataverseResponse = UtilIT.updateDataverse( + testDataverseAlias, + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + + updateDataverseResponse.prettyPrint(); + updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); + + // TODO add more assertions and cases + + // The alias has been changed, so we should not be able to do any operation using the old one + String oldDataverseAlias = testDataverseAlias; + Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); + getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + } + @Test public void 
testListFacets() { Response createUserResponse = UtilIT.createRandomUser(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index 70f49d81b35..eb40a85f10c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -1,5 +1,6 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder; import io.restassured.http.ContentType; import io.restassured.path.json.JsonPath; import io.restassured.response.Response; @@ -12,6 +13,7 @@ import jakarta.json.JsonArrayBuilder; import jakarta.json.JsonObject; +import static edu.harvard.iq.dataverse.util.json.NullSafeJsonBuilder.jsonObjectBuilder; import static jakarta.ws.rs.core.Response.Status.CREATED; import java.nio.charset.StandardCharsets; @@ -428,6 +430,66 @@ static Response createSubDataverse(String alias, String category, String apiToke return createDataverseResponse; } + static Response updateDataverse(String alias, + String newAlias, + String newName, + String newAffiliation, + String newDataverseType, + String[] newContactEmails, + String[] newInputLevelNames, + String[] newFacetIds, + String[] newMetadataBlockNames, + String apiToken) { + JsonArrayBuilder contactArrayBuilder = Json.createArrayBuilder(); + for(String contactEmail : newContactEmails) { + contactArrayBuilder.add(Json.createObjectBuilder().add("contactEmail", contactEmail)); + } + NullSafeJsonBuilder jsonBuilder = jsonObjectBuilder() + .add("alias", newAlias) + .add("name", newName) + .add("affiliation", newAffiliation) + .add("dataverseContacts", contactArrayBuilder) + .add("dataverseType", newDataverseType) + .add("affiliation", newAffiliation); + + JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); + + if (newInputLevelNames != null) { + JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); + for(String inputLevelName : 
newInputLevelNames) { + inputLevelsArrayBuilder.add(Json.createObjectBuilder() + .add("datasetFieldTypeName", inputLevelName) + .add("required", true) + .add("include", true) + ); + } + metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); + } + + if (newMetadataBlockNames != null) { + JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); + for(String metadataBlockName : newMetadataBlockNames) { + metadataBlockNamesArrayBuilder.add(metadataBlockName); + } + metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); + } + + if (newFacetIds != null) { + JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); + for(String facetId : newFacetIds) { + facetIdsArrayBuilder.add(facetId); + } + metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); + } + + jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + static Response createDataverse(JsonObject dvData, String apiToken) { Response createDataverseResponse = given() .body(dvData.toString()).contentType(ContentType.JSON) From e5cdb106e22064fe4fc84fa834dae2bf984525ff Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:52:12 +0100 Subject: [PATCH 08/16] Refactor: UtilIT duplication on dataverse write operations --- .../edu/harvard/iq/dataverse/api/UtilIT.java | 66 ++++++------------- 1 file changed, 21 insertions(+), 45 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java index eb40a85f10c..502f1ecb0a8 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/UtilIT.java @@ -391,43 +391,12 @@ static Response createSubDataverse(String alias, String category, String apiToke 
objectBuilder.add("affiliation", affiliation); } - JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - - if (inputLevelNames != null) { - JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : inputLevelNames) { - inputLevelsArrayBuilder.add(Json.createObjectBuilder() - .add("datasetFieldTypeName", inputLevelName) - .add("required", true) - .add("include", true) - ); - } - metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); - } - - if (metadataBlockNames != null) { - JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : metadataBlockNames) { - metadataBlockNamesArrayBuilder.add(metadataBlockName); - } - metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); - } - - if (facetIds != null) { - JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : facetIds) { - facetIdsArrayBuilder.add(facetId); - } - metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); - } - - objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(inputLevelNames, facetIds, metadataBlockNames, objectBuilder); JsonObject dvData = objectBuilder.build(); - Response createDataverseResponse = given() + return given() .body(dvData.toString()).contentType(ContentType.JSON) .when().post("/api/dataverses/" + parentDV + "?key=" + apiToken); - return createDataverseResponse; } static Response updateDataverse(String alias, @@ -452,11 +421,23 @@ static Response updateDataverse(String alias, .add("dataverseType", newDataverseType) .add("affiliation", newAffiliation); + updateDataverseRequestJsonWithMetadataBlocksConfiguration(newInputLevelNames, newFacetIds, newMetadataBlockNames, jsonBuilder); + + JsonObject dvData = jsonBuilder.build(); + return given() + .body(dvData.toString()).contentType(ContentType.JSON) + 
.when().put("/api/dataverses/" + alias + "?key=" + apiToken); + } + + private static void updateDataverseRequestJsonWithMetadataBlocksConfiguration(String[] inputLevelNames, + String[] facetIds, + String[] metadataBlockNames, + JsonObjectBuilder objectBuilder) { JsonObjectBuilder metadataBlocksObjectBuilder = Json.createObjectBuilder(); - if (newInputLevelNames != null) { + if (inputLevelNames != null) { JsonArrayBuilder inputLevelsArrayBuilder = Json.createArrayBuilder(); - for(String inputLevelName : newInputLevelNames) { + for(String inputLevelName : inputLevelNames) { inputLevelsArrayBuilder.add(Json.createObjectBuilder() .add("datasetFieldTypeName", inputLevelName) .add("required", true) @@ -466,28 +447,23 @@ static Response updateDataverse(String alias, metadataBlocksObjectBuilder.add("inputLevels", inputLevelsArrayBuilder); } - if (newMetadataBlockNames != null) { + if (metadataBlockNames != null) { JsonArrayBuilder metadataBlockNamesArrayBuilder = Json.createArrayBuilder(); - for(String metadataBlockName : newMetadataBlockNames) { + for(String metadataBlockName : metadataBlockNames) { metadataBlockNamesArrayBuilder.add(metadataBlockName); } metadataBlocksObjectBuilder.add("metadataBlockNames", metadataBlockNamesArrayBuilder); } - if (newFacetIds != null) { + if (facetIds != null) { JsonArrayBuilder facetIdsArrayBuilder = Json.createArrayBuilder(); - for(String facetId : newFacetIds) { + for(String facetId : facetIds) { facetIdsArrayBuilder.add(facetId); } metadataBlocksObjectBuilder.add("facetIds", facetIdsArrayBuilder); } - jsonBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); - - JsonObject dvData = jsonBuilder.build(); - return given() - .body(dvData.toString()).contentType(ContentType.JSON) - .when().put("/api/dataverses/" + alias + "?key=" + apiToken); + objectBuilder.add("metadataBlocks", metadataBlocksObjectBuilder); } static Response createDataverse(JsonObject dvData, String apiToken) { From 8020d50c26a3d41bef41403984495bad535dbad2 Mon Sep 
17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 14:56:24 +0100 Subject: [PATCH 09/16] Added: pending doc comment to JsonParserTest method --- .../edu/harvard/iq/dataverse/util/json/JsonParserTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 1a1d836f6a0..2cffa7d921c 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -266,7 +266,8 @@ public void testParseCompleteDataverse() throws JsonParseException { } /** - * TODO + * Test that a JSON object passed for a complete Dataverse update is correctly parsed. + * This checks that all properties are parsed into the correct dataverse properties. * @throws JsonParseException when this test is broken. */ @Test From 2d10f22de0a20ab73ef146d6b90f1fb587672f2a Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 17 Oct 2024 15:49:36 +0100 Subject: [PATCH 10/16] Added: missing IT for updateDataverse endpoint --- .../iq/dataverse/api/DataversesIT.java | 63 ++++++++++++++++--- 1 file changed, 55 insertions(+), 8 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index 7abc35d536a..c311fa1016e 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1,12 +1,15 @@ package edu.harvard.iq.dataverse.api; import io.restassured.RestAssured; + import static io.restassured.RestAssured.given; import static io.restassured.path.json.JsonPath.with; + import io.restassured.response.Response; import edu.harvard.iq.dataverse.Dataverse; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; + import java.io.FileNotFoundException; 
import java.io.FileReader; import java.io.IOException; @@ -14,6 +17,7 @@ import java.util.Arrays; import java.util.List; import java.util.logging.Logger; + import jakarta.json.Json; import jakarta.json.JsonObject; import jakarta.json.JsonObjectBuilder; @@ -31,6 +35,7 @@ import static org.junit.jupiter.api.Assertions.*; import java.nio.file.Files; + import io.restassured.path.json.JsonPath; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; @@ -43,7 +48,7 @@ public class DataversesIT { public static void setUpClass() { RestAssured.baseURI = UtilIT.getRestAssuredBaseUri(); } - + @AfterAll public static void afterClass() { Response removeExcludeEmail = UtilIT.deleteSetting(SettingsServiceBean.Key.ExcludeEmailFromExport); @@ -1267,10 +1272,10 @@ public void testUpdateDataverse() { String newName = "New Test Dataverse Name"; String newAffiliation = "New Test Dataverse Affiliation"; String newDataverseType = Dataverse.DataverseType.TEACHING_COURSES.toString(); - String[] newContactEmails = new String[] {"new_email@dataverse.com"}; - String[] newInputLevelNames = new String[] {"geographicCoverage"}; - String[] newFacetIds = new String[] {"contributorName"}; - String[] newMetadataBlockNames = new String[] {"citation", "geospatial", "biomedical"}; + String[] newContactEmails = new String[]{"new_email@dataverse.com"}; + String[] newInputLevelNames = new String[]{"geographicCoverage"}; + String[] newFacetIds = new String[]{"contributorName"}; + String[] newMetadataBlockNames = new String[]{"citation", "geospatial", "biomedical"}; Response updateDataverseResponse = UtilIT.updateDataverse( testDataverseAlias, @@ -1285,15 +1290,57 @@ public void testUpdateDataverse() { apiToken ); - updateDataverseResponse.prettyPrint(); + // Assert dataverse properties are updated updateDataverseResponse.then().assertThat().statusCode(OK.getStatusCode()); - - // TODO add more assertions and cases + String actualDataverseAlias = 
updateDataverseResponse.then().extract().path("data.alias"); + assertEquals(newAlias, actualDataverseAlias); + String actualDataverseName = updateDataverseResponse.then().extract().path("data.name"); + assertEquals(newName, actualDataverseName); + String actualDataverseAffiliation = updateDataverseResponse.then().extract().path("data.affiliation"); + assertEquals(newAffiliation, actualDataverseAffiliation); + String actualDataverseType = updateDataverseResponse.then().extract().path("data.dataverseType"); + assertEquals(newDataverseType, actualDataverseType); + String actualContactEmail = updateDataverseResponse.then().extract().path("data.dataverseContacts[0].contactEmail"); + assertEquals("new_email@dataverse.com", actualContactEmail); + + // Assert metadata blocks are updated + Response listMetadataBlocksResponse = UtilIT.listMetadataBlocks(newAlias, false, false, apiToken); + String actualDataverseMetadataBlock1 = listMetadataBlocksResponse.then().extract().path("data[0].name"); + String actualDataverseMetadataBlock2 = listMetadataBlocksResponse.then().extract().path("data[1].name"); + String actualDataverseMetadataBlock3 = listMetadataBlocksResponse.then().extract().path("data[2].name"); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock1)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock2)); + assertThat(newMetadataBlockNames, hasItemInArray(actualDataverseMetadataBlock3)); + + // Assert custom facets are updated + Response listDataverseFacetsResponse = UtilIT.listDataverseFacets(newAlias, apiToken); + String actualFacetName = listDataverseFacetsResponse.then().extract().path("data[0]"); + assertThat(newFacetIds, hasItemInArray(actualFacetName)); + + // Assert input levels are updated + Response listDataverseInputLevelsResponse = UtilIT.listDataverseInputLevels(newAlias, apiToken); + String actualInputLevelName = listDataverseInputLevelsResponse.then().extract().path("data[0].datasetFieldTypeName"); 
+ assertThat(newInputLevelNames, hasItemInArray(actualInputLevelName)); // The alias has been changed, so we should not be able to do any operation using the old one String oldDataverseAlias = testDataverseAlias; Response getDataverseResponse = UtilIT.listDataverseFacets(oldDataverseAlias, apiToken); getDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // Should return error when the dataverse to edit does not exist + updateDataverseResponse = UtilIT.updateDataverse( + "unexistingDataverseAlias", + newAlias, + newName, + newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); } @Test From d334b689437e06cf674a6725600c705628845c47 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 09:41:55 +0200 Subject: [PATCH 11/16] Refactor: CreateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 84 +++++++++++++++ .../command/impl/CreateDataverseCommand.java | 102 +++++------------- 2 files changed, 110 insertions(+), 76 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java new file mode 100644 index 00000000000..577f877db41 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -0,0 +1,84 @@ +package edu.harvard.iq.dataverse.engine.command.impl; + +import edu.harvard.iq.dataverse.*; +import edu.harvard.iq.dataverse.engine.command.AbstractCommand; +import edu.harvard.iq.dataverse.engine.command.CommandContext; +import edu.harvard.iq.dataverse.engine.command.DataverseRequest; +import 
edu.harvard.iq.dataverse.engine.command.exception.CommandException; +import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; + +import java.util.ArrayList; +import java.util.List; + +/** + * TODO + */ +abstract class AbstractWriteDataverseCommand extends AbstractCommand { + + protected Dataverse dataverse; + private final List inputLevels; + private final List facets; + protected final List metadataBlocks; + + public AbstractWriteDataverseCommand(Dataverse dataverse, + DataverseRequest request, + List facets, + List inputLevels, + List metadataBlocks) { + super(request, dataverse.getOwner()); + this.dataverse = dataverse; + if (facets != null) { + this.facets = new ArrayList<>(facets); + } else { + this.facets = null; + } + if (inputLevels != null) { + this.inputLevels = new ArrayList<>(inputLevels); + } else { + this.inputLevels = null; + } + if (metadataBlocks != null) { + this.metadataBlocks = new ArrayList<>(metadataBlocks); + } else { + this.metadataBlocks = null; + } + } + + @Override + public Dataverse execute(CommandContext ctxt) throws CommandException { + dataverse = innerExecute(ctxt); + + if (metadataBlocks != null && !metadataBlocks.isEmpty()) { + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); + } + + if (facets != null) { + ctxt.facets().deleteFacetsFor(dataverse); + + if (!facets.isEmpty()) { + dataverse.setFacetRoot(true); + } + + int i = 0; + for (DatasetFieldType df : facets) { + ctxt.facets().create(i++, df, dataverse); + } + } + + if (inputLevels != null) { + if (!inputLevels.isEmpty()) { + dataverse.addInputLevelsMetadataBlocksIfNotPresent(inputLevels); + } + ctxt.fieldTypeInputLevels().deleteFacetsFor(dataverse); + for (DataverseFieldTypeInputLevel inputLevel : inputLevels) { + inputLevel.setDataverse(dataverse); + ctxt.fieldTypeInputLevels().create(inputLevel); + } + } + + return ctxt.dataverses().save(dataverse); + } + + abstract protected Dataverse innerExecute(CommandContext 
ctxt) throws IllegalCommandException; +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index 6957dac416d..ce922dc565d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -6,11 +6,9 @@ import edu.harvard.iq.dataverse.authorization.groups.Group; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.authorization.users.User; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; @@ -27,47 +25,26 @@ * @author michael */ @RequiredPermissions(Permission.AddDataverse) -public class CreateDataverseCommand extends AbstractCommand { - - private final Dataverse created; - private final List inputLevelList; - private final List facetList; - private final List metadataBlocks; +public class CreateDataverseCommand extends AbstractWriteDataverseCommand { public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList) { - this(created, aRequest, facetList, inputLevelList, null); + DataverseRequest request, + List facets, + List inputLevels) { + this(created, request, facets, inputLevels, null); } public CreateDataverseCommand(Dataverse created, - DataverseRequest aRequest, - List facetList, - List inputLevelList, + DataverseRequest request, + List facets, + List inputLevels, List 
metadataBlocks) { - super(aRequest, created.getOwner()); - this.created = created; - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } + super(created, request, facets, inputLevels, metadataBlocks); } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - Dataverse owner = created.getOwner(); + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { + Dataverse owner = dataverse.getOwner(); if (owner == null) { if (ctxt.dataverses().isRootDataverseExists()) { throw new IllegalCommandException("Root Dataverse already exists. Cannot create another one", this); @@ -75,44 +52,44 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - created.setMetadataBlockRoot(true); - created.setMetadataBlocks(metadataBlocks); + dataverse.setMetadataBlockRoot(true); + dataverse.setMetadataBlocks(metadataBlocks); } - if (created.getCreateDate() == null) { - created.setCreateDate(new Timestamp(new Date().getTime())); + if (dataverse.getCreateDate() == null) { + dataverse.setCreateDate(new Timestamp(new Date().getTime())); } - if (created.getCreator() == null) { + if (dataverse.getCreator() == null) { final User user = getRequest().getUser(); if (user.isAuthenticated()) { - created.setCreator((AuthenticatedUser) user); + dataverse.setCreator((AuthenticatedUser) user); } else { throw new IllegalCommandException("Guest users cannot create a Dataverse.", this); } } - if (created.getDataverseType() == null) { - created.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); + if (dataverse.getDataverseType() == null) { + 
dataverse.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); } - if (created.getDefaultContributorRole() == null) { - created.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); + if (dataverse.getDefaultContributorRole() == null) { + dataverse.setDefaultContributorRole(ctxt.roles().findBuiltinRoleByAlias(DataverseRole.EDITOR)); } // @todo for now we are saying all dataverses are permission root - created.setPermissionRoot(true); + dataverse.setPermissionRoot(true); - if (ctxt.dataverses().findByAlias(created.getAlias()) != null) { - throw new IllegalCommandException("A dataverse with alias " + created.getAlias() + " already exists", this); + if (ctxt.dataverses().findByAlias(dataverse.getAlias()) != null) { + throw new IllegalCommandException("A dataverse with alias " + dataverse.getAlias() + " already exists", this); } - if (created.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { + if (dataverse.getFilePIDsEnabled() != null && !ctxt.settings().isTrueForKey(SettingsServiceBean.Key.AllowEnablingFilePIDsPerCollection, false)) { throw new IllegalCommandException("File PIDs cannot be enabled per collection", this); } // Save the dataverse - Dataverse managedDv = ctxt.dataverses().save(created); + Dataverse managedDv = ctxt.dataverses().save(dataverse); // Find the built in admin role (currently by alias) DataverseRole adminRole = ctxt.roles().findBuiltinRoleByAlias(DataverseRole.ADMIN); @@ -159,33 +136,6 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } managedDv.setPermissionModificationTime(new Timestamp(new Date().getTime())); - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(managedDv); - - if (!facetList.isEmpty()) { - managedDv.setFacetRoot(true); - } - - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, managedDv); - } - } - - if (inputLevelList != null) { - if 
(!inputLevelList.isEmpty()) { - managedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(managedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(managedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); - } - } - - // TODO: save is called here and above; we likely don't need both - managedDv = ctxt.dataverses().save(managedDv); return managedDv; } From e7782394b037fb6890f785cebd6f12869630c6c6 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 10:57:54 +0200 Subject: [PATCH 12/16] Refactor: UpdateDataverseCommand inheriting AbstractWriteDataverseCommand --- .../impl/AbstractWriteDataverseCommand.java | 5 +- .../command/impl/CreateDataverseCommand.java | 2 +- .../command/impl/UpdateDataverseCommand.java | 102 ++++-------------- 3 files changed, 27 insertions(+), 82 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java index 577f877db41..40c2abf5d21 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/AbstractWriteDataverseCommand.java @@ -11,7 +11,7 @@ import java.util.List; /** - * TODO + * An abstract base class for commands that perform write operations on {@link Dataverse}s. 
*/ abstract class AbstractWriteDataverseCommand extends AbstractCommand { @@ -21,11 +21,12 @@ abstract class AbstractWriteDataverseCommand extends AbstractCommand protected final List metadataBlocks; public AbstractWriteDataverseCommand(Dataverse dataverse, + Dataverse affectedDataverse, DataverseRequest request, List facets, List inputLevels, List metadataBlocks) { - super(request, dataverse.getOwner()); + super(request, affectedDataverse); this.dataverse = dataverse; if (facets != null) { this.facets = new ArrayList<>(facets); diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java index ce922dc565d..145cfb6199c 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/CreateDataverseCommand.java @@ -39,7 +39,7 @@ public CreateDataverseCommand(Dataverse created, List facets, List inputLevels, List metadataBlocks) { - super(created, request, facets, inputLevels, metadataBlocks); + super(created, created.getOwner(), request, facets, inputLevels, metadataBlocks); } @Override diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 16b93debb6d..14d9e408be8 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -6,16 +6,13 @@ import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; -import edu.harvard.iq.dataverse.engine.command.AbstractCommand; import edu.harvard.iq.dataverse.engine.command.CommandContext; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import 
edu.harvard.iq.dataverse.engine.command.RequiredPermissions; -import edu.harvard.iq.dataverse.engine.command.exception.CommandException; import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException; import java.util.ArrayList; import java.util.List; -import java.util.logging.Logger; /** * Update an existing dataverse. @@ -23,72 +20,41 @@ * @author michael */ @RequiredPermissions(Permission.EditDataverse) -public class UpdateDataverseCommand extends AbstractCommand { - private static final Logger logger = Logger.getLogger(UpdateDataverseCommand.class.getName()); - - private final Dataverse editedDv; - private final List facetList; +public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; - private final List inputLevelList; - private final List metadataBlocks; private boolean datasetsReindexRequired = false; public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList) { - this(editedDv, facetList, featuredDataverseList, aRequest, inputLevelList, null); + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels) { + this(editedDv, facets, featuredDataverses, request, inputLevels, null); } public UpdateDataverseCommand(Dataverse editedDv, - List facetList, - List featuredDataverseList, - DataverseRequest aRequest, - List inputLevelList, + List facets, + List featuredDataverses, + DataverseRequest request, + List inputLevels, List metadataBlocks) { - super(aRequest, editedDv); - this.editedDv = editedDv; - // add update template uses this command but does not - // update facet list or featured dataverses - if (facetList != null) { - this.facetList = new ArrayList<>(facetList); - } else { - this.facetList = null; - } - if (featuredDataverseList != null) { - this.featuredDataverseList = new ArrayList<>(featuredDataverseList); + super(editedDv, editedDv, request, facets, 
inputLevels, metadataBlocks); + if (featuredDataverses != null) { + this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { this.featuredDataverseList = null; } - if (inputLevelList != null) { - this.inputLevelList = new ArrayList<>(inputLevelList); - } else { - this.inputLevelList = null; - } - if (metadataBlocks != null) { - this.metadataBlocks = new ArrayList<>(metadataBlocks); - } else { - this.metadataBlocks = null; - } } @Override - public Dataverse execute(CommandContext ctxt) throws CommandException { - logger.fine("Entering update dataverse command"); - - if (metadataBlocks != null && !metadataBlocks.isEmpty()) { - editedDv.setMetadataBlockRoot(true); - editedDv.setMetadataBlocks(metadataBlocks); - } - + protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandException { // Perform any optional validation steps, if defined: if (ctxt.systemConfig().isExternalDataverseValidationEnabled()) { // For admins, an override of the external validation step may be enabled: if (!(getUser().isSuperuser() && ctxt.systemConfig().isExternalValidationAdminOverrideEnabled())) { String executable = ctxt.systemConfig().getDataverseValidationExecutable(); - boolean result = validateDataverseMetadataExternally(editedDv, executable, getRequest()); + boolean result = validateDataverseMetadataExternally(dataverse, executable, getRequest()); if (!result) { String rejectionMessage = ctxt.systemConfig().getDataverseUpdateValidationFailureMsg(); @@ -97,7 +63,7 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { } } - Dataverse oldDv = ctxt.dataverses().find(editedDv.getId()); + Dataverse oldDv = ctxt.dataverses().find(dataverse.getId()); DataverseType oldDvType = oldDv.getDataverseType(); String oldDvAlias = oldDv.getAlias(); @@ -106,44 +72,22 @@ public Dataverse execute(CommandContext ctxt) throws CommandException { // We don't want to reindex the children datasets unnecessarily: // When these values are changed we need to 
reindex all children datasets // This check is not recursive as all the values just report the immediate parent - if (!oldDvType.equals(editedDv.getDataverseType()) - || !oldDvName.equals(editedDv.getName()) - || !oldDvAlias.equals(editedDv.getAlias())) { + if (!oldDvType.equals(dataverse.getDataverseType()) + || !oldDvName.equals(dataverse.getName()) + || !oldDvAlias.equals(dataverse.getAlias())) { datasetsReindexRequired = true; } if (featuredDataverseList != null) { - ctxt.featuredDataverses().deleteFeaturedDataversesFor(editedDv); + ctxt.featuredDataverses().deleteFeaturedDataversesFor(dataverse); int i = 0; for (Object obj : featuredDataverseList) { Dataverse dv = (Dataverse) obj; - ctxt.featuredDataverses().create(i++, dv.getId(), editedDv.getId()); - } - } - - if (facetList != null) { - ctxt.facets().deleteFacetsFor(editedDv); - if (!facetList.isEmpty()) { - editedDv.setFacetRoot(true); - } - int i = 0; - for (DatasetFieldType df : facetList) { - ctxt.facets().create(i++, df, editedDv); - } - } - if (inputLevelList != null) { - if (!inputLevelList.isEmpty()) { - editedDv.addInputLevelsMetadataBlocksIfNotPresent(inputLevelList); - } - ctxt.fieldTypeInputLevels().deleteFacetsFor(editedDv); - for (DataverseFieldTypeInputLevel inputLevel : inputLevelList) { - inputLevel.setDataverse(editedDv); - ctxt.fieldTypeInputLevels().create(inputLevel); + ctxt.featuredDataverses().create(i++, dv.getId(), dataverse.getId()); } } - Dataverse result = ctxt.dataverses().save(editedDv); - return result; + return dataverse; } @Override @@ -154,7 +98,7 @@ public boolean onSuccess(CommandContext ctxt, Object r) { // It appears that we at some point lost some extra logic here, where // we only reindex the underlying datasets if one or more of the specific set // of fields have been changed (since these values are included in the - // indexed solr documents for dataasets). So I'm putting that back. -L.A. + // indexed solr documents for datasets). So I'm putting that back. -L.A. 
Dataverse result = (Dataverse) r; if (datasetsReindexRequired) { From 4e90d0c3fe8d501f5810a162c304ce4e3b43a891 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:40:43 +0100 Subject: [PATCH 13/16] Added: docs for #10904 --- doc/sphinx-guides/source/api/native-api.rst | 52 +++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst index f8b8620f121..6254742eebb 100644 --- a/doc/sphinx-guides/source/api/native-api.rst +++ b/doc/sphinx-guides/source/api/native-api.rst @@ -74,6 +74,58 @@ The request JSON supports an optional ``metadataBlocks`` object, with the follow To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. +.. _update-dataverse-api: + +Update a Dataverse Collection +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Updates an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. (see :ref:`create-dataverse-api`). + +The steps for updating a Dataverse collection are: + +- Prepare a JSON file containing the fields for the properties you want to update. You do not need to include all the properties, only the ones you want to update. +- Execute a curl command or equivalent. + +As an example, you can download :download:`dataverse-complete.json <../_static/api/dataverse-complete.json>` file and modify it to suit your needs. The controlled vocabulary for ``dataverseType`` is the following: + +- ``DEPARTMENT`` +- ``JOURNALS`` +- ``LABORATORY`` +- ``ORGANIZATIONS_INSTITUTIONS`` +- ``RESEARCHERS`` +- ``RESEARCH_GROUP`` +- ``RESEARCH_PROJECTS`` +- ``TEACHING_COURSES`` +- ``UNCATEGORIZED`` + +The curl command below assumes you are using the name "dataverse-complete.json" and that this file is in your current working directory. 
+ +Next you need to figure out the alias or database id of the Dataverse collection you want to update. + +.. code-block:: bash + + export API_TOKEN=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx + export SERVER_URL=https://demo.dataverse.org + export DV_ALIAS=dvAlias + + curl -H "X-Dataverse-key:$API_TOKEN" -X PUT "$SERVER_URL/api/dataverses/$DV_ALIAS" --upload-file dataverse-complete.json + +The fully expanded example above (without environment variables) looks like this: + +.. code-block:: bash + + curl -H "X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" -X PUT "https://demo.dataverse.org/api/dataverses/dvAlias" --upload-file dataverse-complete.json + +You should expect an HTTP 200 response and JSON beginning with "status":"OK" followed by a representation of the updated Dataverse collection. + +Same as in :ref:`create-dataverse-api`, the request JSON supports an optional ``metadataBlocks`` object, with the following supported sub-objects: + +- ``metadataBlockNames``: The names of the metadata blocks you want to add to the Dataverse collection. +- ``inputLevels``: The names of the fields in each metadata block for which you want to add a custom configuration regarding their inclusion or requirement when creating and editing datasets in the new Dataverse collection. Note that if the corresponding metadata block names are not specified in the ``metadataBlockNames`` field, they will be added automatically to the Dataverse collection. +- ``facetIds``: The names of the fields to use as facets for browsing datasets and collections in the new Dataverse collection. Note that the order of the facets is defined by their order in the provided JSON array. + +To obtain an example of how these objects are included in the JSON file, download :download:`dataverse-complete-optional-params.json <../_static/api/dataverse-complete-optional-params.json>` file and modify it to suit your needs. + +.. 
_view-dataverse: View a Dataverse Collection From 6aac751d55375e7433d01d500f38b8be83a7b5bc Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 16:44:09 +0100 Subject: [PATCH 14/16] Added: release notes for #10904 --- doc/release-notes/10904-edit-dataverse-collection-endpoint.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 doc/release-notes/10904-edit-dataverse-collection-endpoint.md diff --git a/doc/release-notes/10904-edit-dataverse-collection-endpoint.md b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md new file mode 100644 index 00000000000..b9256941eea --- /dev/null +++ b/doc/release-notes/10904-edit-dataverse-collection-endpoint.md @@ -0,0 +1 @@ +Adds a new endpoint (`PUT /api/dataverses/`) for updating an existing Dataverse collection using a JSON file following the same structure as the one used in the API for the creation. From 4f98be6a1bcec06ffcada8098e57baf4ea0dd9d2 Mon Sep 17 00:00:00 2001 From: GPortas Date: Mon, 21 Oct 2024 17:37:26 +0100 Subject: [PATCH 15/16] Removed: unnecessary line in updateDataverse endpoint --- src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index d8bd2b8cb4b..895d073bb47 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -184,8 +184,6 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod List metadataBlocks = parseMetadataBlocks(body); List facets = parseFacets(body); - updatedDataverse.setId(originalDataverse.getId()); - AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); return ok(json(updatedDataverse)); From 
0b5f9a834b01dff526dbe76c250a5707a04a4656 Mon Sep 17 00:00:00 2001 From: GPortas Date: Thu, 24 Oct 2024 15:46:59 +0100 Subject: [PATCH 16/16] Changed: handling properties update through a DTO object for updateDataverse endpoint --- .../harvard/iq/dataverse/api/Dataverses.java | 42 +++++--- .../iq/dataverse/api/dto/DataverseDTO.java | 63 ++++++++++++ .../command/impl/UpdateDataverseCommand.java | 42 +++++++- .../iq/dataverse/util/json/JsonParser.java | 99 ++++++++++--------- .../iq/dataverse/api/DataversesIT.java | 20 ++++ .../dataverse/util/json/JsonParserTest.java | 20 ++-- 6 files changed, 204 insertions(+), 82 deletions(-) create mode 100644 src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java index 895d073bb47..25176b85689 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Dataverses.java @@ -3,12 +3,9 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.api.auth.AuthRequired; import edu.harvard.iq.dataverse.api.datadeposit.SwordServiceBean; -import edu.harvard.iq.dataverse.api.dto.DataverseMetadataBlockFacetDTO; +import edu.harvard.iq.dataverse.api.dto.*; import edu.harvard.iq.dataverse.authorization.DataverseRole; -import edu.harvard.iq.dataverse.api.dto.ExplicitGroupDTO; -import edu.harvard.iq.dataverse.api.dto.RoleAssignmentDTO; -import edu.harvard.iq.dataverse.api.dto.RoleDTO; import edu.harvard.iq.dataverse.api.imports.ImportException; import edu.harvard.iq.dataverse.api.imports.ImportServiceBean; import edu.harvard.iq.dataverse.authorization.Permission; @@ -128,7 +125,7 @@ public Response addRoot(@Context ContainerRequestContext crc, String body) { public Response addDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String parentIdtf) { Dataverse newDataverse; try { - newDataverse = 
parseAndValidateDataverseRequestBody(body, null); + newDataverse = parseAndValidateAddDataverseRequestBody(body); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -159,20 +156,33 @@ public Response addDataverse(@Context ContainerRequestContext crc, String body, } } + private Dataverse parseAndValidateAddDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { + try { + JsonObject addDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverse(addDataverseJson); + } catch (JsonParsingException jpe) { + logger.log(Level.SEVERE, "Json: {0}", body); + throw jpe; + } catch (JsonParseException ex) { + logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + throw ex; + } + } + @PUT @AuthRequired @Path("{identifier}") public Response updateDataverse(@Context ContainerRequestContext crc, String body, @PathParam("identifier") String identifier) { - Dataverse originalDataverse; + Dataverse dataverse; try { - originalDataverse = findDataverseOrDie(identifier); + dataverse = findDataverseOrDie(identifier); } catch (WrappedResponse e) { return e.getResponse(); } - Dataverse updatedDataverse; + DataverseDTO updatedDataverseDTO; try { - updatedDataverse = parseAndValidateDataverseRequestBody(body, originalDataverse); + updatedDataverseDTO = parseAndValidateUpdateDataverseRequestBody(body); } catch (JsonParsingException jpe) { return error(Status.BAD_REQUEST, MessageFormat.format(BundleUtil.getStringFromBundle("dataverse.create.error.jsonparse"), jpe.getMessage())); } catch (JsonParseException ex) { @@ -180,13 +190,13 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } try { - List inputLevels = parseInputLevels(body, originalDataverse); + List inputLevels = parseInputLevels(body, dataverse); List 
metadataBlocks = parseMetadataBlocks(body); List facets = parseFacets(body); AuthenticatedUser u = getRequestAuthenticatedUserOrDie(crc); - updatedDataverse = execCommand(new UpdateDataverseCommand(updatedDataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks)); - return ok(json(updatedDataverse)); + dataverse = execCommand(new UpdateDataverseCommand(dataverse, facets, null, createDataverseRequest(u), inputLevels, metadataBlocks, updatedDataverseDTO)); + return ok(json(dataverse)); } catch (WrappedResponse ww) { return handleWrappedResponse(ww); @@ -198,15 +208,15 @@ public Response updateDataverse(@Context ContainerRequestContext crc, String bod } } - private Dataverse parseAndValidateDataverseRequestBody(String body, Dataverse dataverseToUpdate) throws JsonParsingException, JsonParseException { + private DataverseDTO parseAndValidateUpdateDataverseRequestBody(String body) throws JsonParsingException, JsonParseException { try { - JsonObject dataverseJson = JsonUtil.getJsonObject(body); - return dataverseToUpdate != null ? 
jsonParser().parseDataverseUpdates(dataverseJson, dataverseToUpdate) : jsonParser().parseDataverse(dataverseJson); + JsonObject updateDataverseJson = JsonUtil.getJsonObject(body); + return jsonParser().parseDataverseDTO(updateDataverseJson); } catch (JsonParsingException jpe) { logger.log(Level.SEVERE, "Json: {0}", body); throw jpe; } catch (JsonParseException ex) { - logger.log(Level.SEVERE, "Error parsing dataverse from json: " + ex.getMessage(), ex); + logger.log(Level.SEVERE, "Error parsing DataverseDTO from json: " + ex.getMessage(), ex); throw ex; } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java new file mode 100644 index 00000000000..4f2f1032c07 --- /dev/null +++ b/src/main/java/edu/harvard/iq/dataverse/api/dto/DataverseDTO.java @@ -0,0 +1,63 @@ +package edu.harvard.iq.dataverse.api.dto; + +import edu.harvard.iq.dataverse.Dataverse; +import edu.harvard.iq.dataverse.DataverseContact; + +import java.util.List; + +public class DataverseDTO { + private String alias; + private String name; + private String description; + private String affiliation; + private List dataverseContacts; + private Dataverse.DataverseType dataverseType; + + public String getAlias() { + return alias; + } + + public void setAlias(String alias) { + this.alias = alias; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getAffiliation() { + return affiliation; + } + + public void setAffiliation(String affiliation) { + this.affiliation = affiliation; + } + + public List getDataverseContacts() { + return dataverseContacts; + } + + public void setDataverseContacts(List dataverseContacts) { + this.dataverseContacts = dataverseContacts; + } + + public 
Dataverse.DataverseType getDataverseType() { + return dataverseType; + } + + public void setDataverseType(Dataverse.DataverseType dataverseType) { + this.dataverseType = dataverseType; + } +} diff --git a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java index 14d9e408be8..55cc3708097 100644 --- a/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java +++ b/src/main/java/edu/harvard/iq/dataverse/engine/command/impl/UpdateDataverseCommand.java @@ -2,6 +2,7 @@ import edu.harvard.iq.dataverse.*; import edu.harvard.iq.dataverse.Dataverse.DataverseType; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.Permission; import static edu.harvard.iq.dataverse.dataverse.DataverseUtil.validateDataverseMetadataExternally; @@ -22,29 +23,32 @@ @RequiredPermissions(Permission.EditDataverse) public class UpdateDataverseCommand extends AbstractWriteDataverseCommand { private final List featuredDataverseList; + private final DataverseDTO updatedDataverseDTO; private boolean datasetsReindexRequired = false; - public UpdateDataverseCommand(Dataverse editedDv, + public UpdateDataverseCommand(Dataverse dataverse, List facets, List featuredDataverses, DataverseRequest request, List inputLevels) { - this(editedDv, facets, featuredDataverses, request, inputLevels, null); + this(dataverse, facets, featuredDataverses, request, inputLevels, null, null); } - public UpdateDataverseCommand(Dataverse editedDv, + public UpdateDataverseCommand(Dataverse dataverse, List facets, List featuredDataverses, DataverseRequest request, List inputLevels, - List metadataBlocks) { - super(editedDv, editedDv, request, facets, inputLevels, metadataBlocks); + List metadataBlocks, + DataverseDTO updatedDataverseDTO) { + super(dataverse, dataverse, request, facets, inputLevels, metadataBlocks); if 
(featuredDataverses != null) { this.featuredDataverseList = new ArrayList<>(featuredDataverses); } else { this.featuredDataverseList = null; } + this.updatedDataverseDTO = updatedDataverseDTO; } @Override @@ -87,9 +91,37 @@ protected Dataverse innerExecute(CommandContext ctxt) throws IllegalCommandExcep } } + if (updatedDataverseDTO != null) { + updateDataverseFromDTO(dataverse, updatedDataverseDTO); + } + return dataverse; } + private void updateDataverseFromDTO(Dataverse dataverse, DataverseDTO dto) { + if (dto.getAlias() != null) { + dataverse.setAlias(dto.getAlias()); + } + if (dto.getName() != null) { + dataverse.setName(dto.getName()); + } + if (dto.getDescription() != null) { + dataverse.setDescription(dto.getDescription()); + } + if (dto.getAffiliation() != null) { + dataverse.setAffiliation(dto.getAffiliation()); + } + if (dto.getDataverseContacts() != null) { + dataverse.setDataverseContacts(dto.getDataverseContacts()); + for (DataverseContact dc : dataverse.getDataverseContacts()) { + dc.setDataverse(dataverse); + } + } + if (dto.getDataverseType() != null) { + dataverse.setDataverseType(dto.getDataverseType()); + } + } + @Override public boolean onSuccess(CommandContext ctxt, Object r) { diff --git a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java index f63e4c4fd9c..8552389525d 100644 --- a/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java +++ b/src/main/java/edu/harvard/iq/dataverse/util/json/JsonParser.java @@ -19,6 +19,7 @@ import edu.harvard.iq.dataverse.MetadataBlockServiceBean; import edu.harvard.iq.dataverse.TermsOfUseAndAccess; import edu.harvard.iq.dataverse.api.Util; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.api.dto.FieldDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -48,6 +49,7 @@ 
import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.function.Consumer; import java.util.logging.Logger; import java.util.stream.Collectors; @@ -130,8 +132,19 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setFacetRoot(jobj.getBoolean("facetRoot", false)); dv.setAffiliation(jobj.getString("affiliation", null)); - updateDataverseContacts(dv, jobj); - + if (jobj.containsKey("dataverseContacts")) { + JsonArray dvContacts = jobj.getJsonArray("dataverseContacts"); + int i = 0; + List dvContactList = new LinkedList<>(); + for (JsonValue jsv : dvContacts) { + DataverseContact dvc = new DataverseContact(dv); + dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); + dvc.setDisplayOrder(i++); + dvContactList.add(dvc); + } + dv.setDataverseContacts(dvContactList); + } + if (jobj.containsKey("theme")) { DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme")); dv.setDataverseTheme(theme); @@ -139,7 +152,13 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { } dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED); // default - updateDataverseType(dv, jobj); + String receivedDataverseType = jobj.getString("dataverseType", null); + if (receivedDataverseType != null) { + Arrays.stream(Dataverse.DataverseType.values()) + .filter(type -> type.name().equals(receivedDataverseType)) + .findFirst() + .ifPresent(dv::setDataverseType); + } if (jobj.containsKey("filePIDsEnabled")) { dv.setFilePIDsEnabled(jobj.getBoolean("filePIDsEnabled")); @@ -147,7 +166,7 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { /* We decided that subject is not user set, but gotten from the subject of the dataverse's datasets - leavig this code in for now, in case we need to go back to it at some point - + if (jobj.containsKey("dataverseSubjects")) { List dvSubjectList = new LinkedList<>(); DatasetFieldType subjectType = 
datasetFieldSvc.findByName(DatasetFieldConstant.subject); @@ -170,63 +189,49 @@ public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException { dv.setDataverseSubjects(dvSubjectList); } */ - + return dv; } - public Dataverse parseDataverseUpdates(JsonObject jsonObject, Dataverse dataverseToUpdate) throws JsonParseException { - String alias = jsonObject.getString("alias", null); - if (alias != null) { - dataverseToUpdate.setAlias(alias); - } - - String name = jsonObject.getString("name", null); - if (name != null) { - dataverseToUpdate.setName(name); - } - - String description = jsonObject.getString("description", null); - if (description != null) { - dataverseToUpdate.setDescription(description); - } - - String affiliation = jsonObject.getString("affiliation", null); - if (affiliation != null) { - dataverseToUpdate.setAffiliation(affiliation); - } - - updateDataverseType(dataverseToUpdate, jsonObject); + public DataverseDTO parseDataverseDTO(JsonObject jsonObject) throws JsonParseException { + DataverseDTO dataverseDTO = new DataverseDTO(); - updateDataverseContacts(dataverseToUpdate, jsonObject); + setDataverseDTOPropertyIfPresent(jsonObject, "alias", dataverseDTO::setAlias); + setDataverseDTOPropertyIfPresent(jsonObject, "name", dataverseDTO::setName); + setDataverseDTOPropertyIfPresent(jsonObject, "description", dataverseDTO::setDescription); + setDataverseDTOPropertyIfPresent(jsonObject, "affiliation", dataverseDTO::setAffiliation); - return dataverseToUpdate; - } - - private void updateDataverseType(Dataverse dataverse, JsonObject jsonObject) { - String receivedDataverseType = jsonObject.getString("dataverseType", null); - if (receivedDataverseType != null) { + String dataverseType = jsonObject.getString("dataverseType", null); + if (dataverseType != null) { Arrays.stream(Dataverse.DataverseType.values()) - .filter(type -> type.name().equals(receivedDataverseType)) + .filter(type -> type.name().equals(dataverseType)) .findFirst() - 
.ifPresent(dataverse::setDataverseType); + .ifPresent(dataverseDTO::setDataverseType); } - } - private void updateDataverseContacts(Dataverse dataverse, JsonObject jsonObject) throws JsonParseException { if (jsonObject.containsKey("dataverseContacts")) { JsonArray dvContacts = jsonObject.getJsonArray("dataverseContacts"); - int i = 0; - List dvContactList = new LinkedList<>(); - for (JsonValue jsv : dvContacts) { - DataverseContact dvc = new DataverseContact(dataverse); - dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail")); - dvc.setDisplayOrder(i++); - dvContactList.add(dvc); + List contacts = new ArrayList<>(); + for (int i = 0; i < dvContacts.size(); i++) { + JsonObject contactObj = dvContacts.getJsonObject(i); + DataverseContact contact = new DataverseContact(); + contact.setContactEmail(getMandatoryString(contactObj, "contactEmail")); + contact.setDisplayOrder(i); + contacts.add(contact); } - dataverse.setDataverseContacts(dvContactList); + dataverseDTO.setDataverseContacts(contacts); } + + return dataverseDTO; } - + + private void setDataverseDTOPropertyIfPresent(JsonObject jsonObject, String key, Consumer setter) { + String value = jsonObject.getString(key, null); + if (value != null) { + setter.accept(value); + } + } + public DataverseTheme parseDataverseTheme(JsonObject obj) { DataverseTheme theme = new DataverseTheme(); diff --git a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java index c311fa1016e..e6ec3cf5400 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/DataversesIT.java @@ -1341,6 +1341,26 @@ public void testUpdateDataverse() { apiToken ); updateDataverseResponse.then().assertThat().statusCode(NOT_FOUND.getStatusCode()); + + // User with unprivileged API token cannot update Root dataverse + updateDataverseResponse = UtilIT.updateDataverse( + "root", + newAlias, + newName, + 
newAffiliation, + newDataverseType, + newContactEmails, + newInputLevelNames, + newFacetIds, + newMetadataBlockNames, + apiToken + ); + updateDataverseResponse.then().assertThat().statusCode(UNAUTHORIZED.getStatusCode()); + + Response rootCollectionInfoResponse = UtilIT.exportDataverse("root", apiToken); + rootCollectionInfoResponse.then().assertThat() + .statusCode(OK.getStatusCode()) + .body("data.name", equalTo("Root")); } @Test diff --git a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java index 2cffa7d921c..f241a5d1dda 100644 --- a/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/util/json/JsonParserTest.java @@ -8,6 +8,7 @@ import edu.harvard.iq.dataverse.DatasetFieldType.FieldType; import edu.harvard.iq.dataverse.DataverseTheme.Alignment; import edu.harvard.iq.dataverse.UserNotification.Type; +import edu.harvard.iq.dataverse.api.dto.DataverseDTO; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroup; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.IpGroupProvider; import edu.harvard.iq.dataverse.authorization.groups.impl.ipaddress.ip.IpAddress; @@ -266,32 +267,23 @@ public void testParseCompleteDataverse() throws JsonParseException { } /** - * Test that a JSON object passed for a complete Dataverse update is correctly parsed. - * This checks that all properties are parsed into the correct dataverse properties. + * Test that a JSON object passed for a DataverseDTO is correctly parsed. + * This checks that all properties are parsed into the correct DataverseDTO properties. * @throws JsonParseException when this test is broken. 
*/ @Test - public void parseDataverseUpdates() throws JsonParseException { - Dataverse dataverse = new Dataverse(); - dataverse.setName("Name to update"); - dataverse.setAlias("aliasToUpdate"); - dataverse.setAffiliation("Affiliation to update"); - dataverse.setDescription("Description to update"); - dataverse.setDataverseType(Dataverse.DataverseType.DEPARTMENT); - List originalContacts = new ArrayList<>(); - originalContacts.add(new DataverseContact(dataverse, "updatethis@example.edu")); - dataverse.setDataverseContacts(originalContacts); + public void parseDataverseDTO() throws JsonParseException { JsonObject dvJson; try (FileReader reader = new FileReader("doc/sphinx-guides/source/_static/api/dataverse-complete.json")) { dvJson = Json.createReader(reader).readObject(); - Dataverse actual = sut.parseDataverseUpdates(dvJson, dataverse); + DataverseDTO actual = sut.parseDataverseDTO(dvJson); assertEquals("Scientific Research", actual.getName()); assertEquals("science", actual.getAlias()); assertEquals("Scientific Research University", actual.getAffiliation()); assertEquals("We do all the science.", actual.getDescription()); assertEquals("LABORATORY", actual.getDataverseType().toString()); assertEquals(2, actual.getDataverseContacts().size()); - assertEquals("pi@example.edu,student@example.edu", actual.getContactEmails()); + assertEquals("pi@example.edu,student@example.edu", actual.getDataverseContacts().get(0).getContactEmail()); assertEquals(0, actual.getDataverseContacts().get(0).getDisplayOrder()); assertEquals(1, actual.getDataverseContacts().get(1).getDisplayOrder()); } catch (IOException ioe) {