From adeeca56ca08da6898f7ff94deb1fe8022ffc006 Mon Sep 17 00:00:00 2001
From: Don Sizemore
Date: Tue, 5 May 2020 14:42:09 -0400
Subject: [PATCH 001/179] #6897 correct ticks and quotes as found by @madunlap
---
doc/sphinx-guides/source/api/native-api.rst | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 33d57ce5887..e2dde2184b0 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1170,13 +1170,13 @@ In the curl example below, all of the above are specified but they are optional.
export SERVER_URL=https://demo.dataverse.org
export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB
- curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"
+ curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F jsonData='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"
The fully expanded example above (without environment variables) looks like this:
.. code-block:: bash
- curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"} https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB
+ curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F jsonData='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB"
You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file.
From 5c2391beb65528675609d499882e41eb56709518 Mon Sep 17 00:00:00 2001
From: ellenk
Date: Tue, 13 Oct 2020 14:13:25 -0400
Subject: [PATCH 002/179] new API method for saving a generic auxiliary file to
a data file
---
.../harvard/iq/dataverse/AuxiliaryFile.java | 90 +++++++++++++++++++
.../dataverse/AuxiliaryFileServiceBean.java | 83 +++++++++++++++++
.../edu/harvard/iq/dataverse/DataFile.java | 11 +++
.../edu/harvard/iq/dataverse/api/Access.java | 83 +++++++++++++----
.../harvard/iq/dataverse/api/AccessIT.java | 24 +++--
5 files changed, 269 insertions(+), 22 deletions(-)
create mode 100644 src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
create mode 100644 src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
new file mode 100644
index 00000000000..1c6c5708fe5
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -0,0 +1,90 @@
+
+package edu.harvard.iq.dataverse;
+
+import java.io.Serializable;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+
+/**
+ *
+ * @author ekraffmiller
+ * Represents a generic file that is associated with a dataFile.
+ * This is a data representation of a physical file in StorageIO
+ */
+@Entity
+public class AuxiliaryFile implements Serializable {
+
+ @Id
+ @GeneratedValue(strategy = GenerationType.IDENTITY)
+ private Long id;
+
+ /**
+ * The data file that this AuxiliaryFile belongs to
+ * a data file may have many auxiliaryFiles
+ */
+ @ManyToOne
+ @JoinColumn(nullable=false)
+ private DataFile dataFile;
+
+ private String formatTag;
+
+ private String formatVersion;
+
+ private String origin;
+
+ private boolean isPublic;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public DataFile getDataFile() {
+ return dataFile;
+ }
+
+ public void setDataFile(DataFile dataFile) {
+ this.dataFile = dataFile;
+ }
+
+ public String getFormatTag() {
+ return formatTag;
+ }
+
+ public void setFormatTag(String formatTag) {
+ this.formatTag = formatTag;
+ }
+
+ public String getFormatVersion() {
+ return formatVersion;
+ }
+
+ public void setFormatVersion(String formatVersion) {
+ this.formatVersion = formatVersion;
+ }
+
+ public String getOrigin() {
+ return origin;
+ }
+
+ public void setOrigin(String origin) {
+ this.origin = origin;
+ }
+
+ public boolean getIsPublic() {
+ return isPublic;
+ }
+
+ public void setIsPublic(boolean isPublic) {
+ this.isPublic = isPublic;
+ }
+
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
new file mode 100644
index 00000000000..01b0ee6e865
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -0,0 +1,83 @@
+
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.logging.Logger;
+import javax.ejb.Stateless;
+import javax.inject.Named;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+
+/**
+ *
+ * @author ekraffmiller
+ * Methods related to the AuxiliaryFile Entity.
+ */
+@Stateless
+@Named
+public class AuxiliaryFileServiceBean implements java.io.Serializable {
+ private static final Logger logger = Logger.getLogger(AuxiliaryFileServiceBean.class.getCanonicalName());
+
+ @PersistenceContext(unitName = "VDCNet-ejbPU")
+ private EntityManager em;
+
+ public AuxiliaryFile find(Object pk) {
+ return em.find(AuxiliaryFile.class, pk);
+ }
+
+ public AuxiliaryFile save(AuxiliaryFile auxiliaryFile) {
+ AuxiliaryFile savedFile = em.merge(auxiliaryFile);
+ return savedFile;
+
+ }
+
+ /**
+ * Save the physical file to storageIO, and save the AuxiliaryFile entity
+ * to the database. This should be an all or nothing transaction - if either
+ * process fails, then nothing will be saved
+ * @param fileInputStream - auxiliary file data to be saved
+ * @param dataFile - the dataFile entity this will be added to
+ * @param formatTag - type of file being saved
+ * @param formatVersion - to distinguish between multiple versions of a file
+ * @param origin - name of the tool/system that created the file
+ * @param isPublic boolean - is this file available to any user?
+ * @return success boolean - returns whether the save was successful
+ */
+ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFile, String formatTag, String formatVersion, String origin, boolean isPublic) {
+
+ StorageIO storageIO =null;
+
+ String auxExtension = formatTag + "_" + formatVersion;
+ try {
+ // Save to storage first.
+ // If that is successful (does not throw exception),
+ // then save to db.
+ // If the db fails for any reason, then rollback
+ // by removing the auxfile from storage.
+ storageIO = dataFile.getStorageIO();
+ storageIO.saveInputStreamAsAux(fileInputStream, auxExtension);
+ AuxiliaryFile auxFile = new AuxiliaryFile();
+ auxFile.setFormatTag(formatTag);
+ auxFile.setFormatVersion(formatVersion);
+ auxFile.setOrigin(origin);
+ auxFile.setIsPublic(isPublic);
+ auxFile.setDataFile(dataFile);
+ save(auxFile);
+ } catch (IOException ioex) {
+ logger.info("IO Exception trying to save auxiliary file: " + ioex.getMessage());
+ return false;
+ } catch (Exception e) {
+ // If anything fails during database insert, remove file from storage
+ try {
+ storageIO.deleteAuxObject(auxExtension);
+ } catch(IOException ioex) {
+ logger.info("IO Exception trying remove auxiliary file in exception handler: " + ioex.getMessage());
+ return false;
+ }
+ }
+ return true;
+ }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 560048db9ca..2f0981c80af 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -192,6 +192,9 @@ public String toString() {
@OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
private List dataTables;
+ @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
+ private List auxiliaryFiles;
+
@OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
private List ingestReports;
@@ -281,6 +284,14 @@ public String getDuplicateFilename() {
public void setDuplicateFilename(String duplicateFilename) {
this.duplicateFilename = duplicateFilename;
}
+
+ public List getAuxiliaryFiles() {
+ return auxiliaryFiles;
+ }
+
+ public void setAuxiliaryFiles(List auxiliaryFiles) {
+ this.auxiliaryFiles = auxiliaryFiles;
+ }
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 8f913ea5f1b..f05dd02e0a4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -6,6 +6,7 @@
package edu.harvard.iq.dataverse.api;
+import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean;
import edu.harvard.iq.dataverse.DataCitation;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.FileMetadata;
@@ -43,14 +44,12 @@
import edu.harvard.iq.dataverse.dataaccess.DataFileZipper;
import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService;
import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-import edu.harvard.iq.dataverse.dataaccess.StoredOriginalFile;
import edu.harvard.iq.dataverse.datavariable.DataVariable;
import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
import edu.harvard.iq.dataverse.engine.command.Command;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand;
import edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand;
@@ -62,13 +61,11 @@
import edu.harvard.iq.dataverse.export.DDIExportServiceBean;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean;
import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry;
-import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil;
import edu.harvard.iq.dataverse.settings.SettingsServiceBean;
import edu.harvard.iq.dataverse.util.BundleUtil;
import edu.harvard.iq.dataverse.util.FileUtil;
import edu.harvard.iq.dataverse.util.StringUtil;
import edu.harvard.iq.dataverse.util.SystemConfig;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
import edu.harvard.iq.dataverse.worldmapauth.WorldMapTokenServiceBean;
import java.util.logging.Logger;
@@ -88,16 +85,9 @@
import java.util.logging.Level;
import javax.inject.Inject;
import javax.json.Json;
-import javax.json.JsonObjectBuilder;
-import java.math.BigDecimal;
import java.net.URI;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.function.Consumer;
-import javax.faces.context.FacesContext;
import javax.json.JsonArrayBuilder;
import javax.persistence.TypedQuery;
-import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
@@ -110,7 +100,6 @@
import javax.servlet.http.HttpServletResponse;
-import javax.servlet.http.HttpSession;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
@@ -125,10 +114,13 @@
import static javax.ws.rs.core.Response.Status.BAD_REQUEST;
import javax.ws.rs.core.StreamingOutput;
import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
-import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json;
import java.net.URISyntaxException;
import javax.ws.rs.RedirectionException;
+import javax.ws.rs.core.MediaType;
+import static javax.ws.rs.core.Response.Status.FORBIDDEN;
+import org.glassfish.jersey.media.multipart.FormDataBodyPart;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
/*
Custom API exceptions [NOT YET IMPLEMENTED]
@@ -184,6 +176,8 @@ public class Access extends AbstractApiBean {
UserNotificationServiceBean userNotificationService;
@EJB
FileDownloadServiceBean fileDownloadService;
+ @EJB
+ AuxiliaryFileServiceBean auxiliaryFileService;
@Inject
PermissionsWrapper permissionsWrapper;
@Inject
@@ -1084,6 +1078,65 @@ private String getWebappImageResource(String imageName) {
}
*/
+ /**
+ *
+ * @param fileId
+ * @param formatTag
+ * @param formatVersion
+ * @param origin
+ * @param isPublic
+ * @param fileInputStream
+ * @param contentDispositionHeader
+ * @param formDataBodyPart
+ * @return
+ */
+ @Path("datafile/{fileId}/metadata/{formatTag}/{formatVersion}")
+ @POST
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+
+ public Response saveAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId,
+ @PathParam("formatTag") String formatTag,
+ @PathParam("formatVersion") String formatVersion,
+ @FormDataParam("origin") String origin,
+ @FormDataParam("isPublic") boolean isPublic,
+ @FormDataParam("file") InputStream fileInputStream,
+ @FormDataParam("file") FormDataContentDisposition contentDispositionHeader,
+ @FormDataParam("file") final FormDataBodyPart formDataBodyPart
+ ) {
+ AuthenticatedUser authenticatedUser;
+ try {
+ authenticatedUser = findAuthenticatedUserOrDie();
+ } catch (WrappedResponse ex) {
+ return error(FORBIDDEN, "Authorized users only.");
+ }
+
+ DataFile dataFile = dataFileService.find(fileId);
+ if (dataFile == null) {
+ return error(BAD_REQUEST, "File not found based on id " + fileId + ".");
+ }
+
+ if (!permissionService.userOn(authenticatedUser, dataFile.getOwner()).has(Permission.EditDataset)) {
+ return error(FORBIDDEN, "User not authorized to edit the dataset.");
+ }
+
+ if (!dataFile.isTabularData()) {
+ return error(BAD_REQUEST, "Not a tabular DataFile (db id=" + fileId + ")");
+ }
+
+
+ boolean saved = auxiliaryFileService.processAuxiliaryFile(fileInputStream, dataFile, formatTag, formatVersion, origin, isPublic);
+
+ if (saved) {
+ return ok("Auxiliary file has been saved.");
+ } else {
+ return error(BAD_REQUEST, "Error saving Auxiliary file.");
+ }
+ }
+
+
+
+
+
/**
* Allow (or disallow) access requests to Dataset
*
@@ -1835,5 +1888,5 @@ private URI handleCustomZipDownload(String customZipServiceUrl, String fileIds,
throw new BadRequestException();
}
return redirectUri;
- }
+ }
}
diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
index ad0c93a80d1..ba646aa3592 100644
--- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
+++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java
@@ -6,20 +6,23 @@
package edu.harvard.iq.dataverse.api;
import com.jayway.restassured.RestAssured;
+import static com.jayway.restassured.RestAssured.given;
import com.jayway.restassured.path.json.JsonPath;
import com.jayway.restassured.response.Response;
import edu.harvard.iq.dataverse.DataFile;
+import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER;
import edu.harvard.iq.dataverse.util.FileUtil;
import java.io.IOException;
import java.util.zip.ZipInputStream;
-import static javax.ws.rs.core.Response.Status.OK;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.zip.ZipEntry;
import java.io.ByteArrayOutputStream;
+import java.io.File;
import java.io.InputStream;
import java.util.HashMap;
+import static javax.ws.rs.core.Response.Status.OK;
import org.hamcrest.collection.IsMapContaining;
import static junit.framework.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -27,12 +30,6 @@
import static org.junit.Assert.assertTrue;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
-import static junit.framework.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.hamcrest.CoreMatchers.is;
-import static org.hamcrest.CoreMatchers.not;
/**
*
@@ -156,6 +153,7 @@ public static void setUp() throws InterruptedException {
String tab4PathToFile = "scripts/search/data/tabular/" + tabFile4NameUnpublished;
Response tab4AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab4PathToFile, apiToken);
tabFile4IdUnpublished = JsonPath.from(tab4AddResponse.body().asString()).getInt("data.files[0].dataFile.id");
+ assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION));
}
@@ -172,6 +170,18 @@ public static void tearDown() {
}
+ @Test
+ public void testSaveAuxiliaryFileWithVersion() {
+ System.out.println("Add aux file with update");
+ String mimeType = null;
+ String pathToFile = "scripts/search/data/tabular/1char";
+ Response response = given()
+ .header(API_TOKEN_HTTP_HEADER, apiToken)
+ .multiPart("file", new File(pathToFile), mimeType)
+ .post("/api/access/datafile/" + tabFile1Id + "/metadata/dpJSON/v1");
+ response.prettyPrint();
+ assertEquals(200, response.getStatusCode());
+ }
//This test does a lot of testing of non-original downloads as well
@Test
From 15a782a4804cf97a99bd90499f3fb6eee1ebe14a Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 11:51:08 -0400
Subject: [PATCH 003/179] name check but ignore fileToReplace name
---
.../java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 2 +-
.../api/datadeposit/MediaResourceManagerImpl.java | 2 +-
.../dataverse/datasetutility/AddReplaceFileHelper.java | 2 +-
.../harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 ++-----
.../java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 9 ++++++---
5 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index b4feecfcdf4..06960f45c9b 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -1128,7 +1128,7 @@ public String save() {
}
// Try to save the NEW files permanently:
- List filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, false);
+ List filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, null);
// reset the working list of fileMetadatas, as to only include the ones
// that have been added to the version successfully:
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
index 23730885aab..84095e936ab 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java
@@ -335,7 +335,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au
throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + violation.getMessage() + " The invalid value was \"" + violation.getInvalidValue() + "\".");
} else {
- ingestService.saveAndAddFilesToDataset(editVersion, dataFiles, false);
+ ingestService.saveAndAddFilesToDataset(editVersion, dataFiles, null);
}
} else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
index ab34b5b2675..ea1cfc38cfa 100644
--- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
+++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java
@@ -1501,7 +1501,7 @@ private boolean step_060_addFilesViaIngestService(){
}
int nFiles = finalFileList.size();
- finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, isFileReplaceOperation());
+ finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace);
if (nFiles != finalFileList.size()) {
if (nFiles == 1) {
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
index f5eeaa1c316..ff114646d38 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java
@@ -155,7 +155,7 @@ public class IngestServiceBean {
// DataFileCategory objects, if any were already assigned to the files).
// It must be called before we attempt to permanently save the files in
// the database by calling the Save command on the dataset and/or version.
- public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, boolean isReplaceOperation) {
+ public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace) {
List ret = new ArrayList<>();
if (newFiles != null && newFiles.size() > 0) {
@@ -164,10 +164,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles) {
+ public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List newFiles, DataFile fileToReplace) {
// Step 1: create list of existing path names from all FileMetadata in the DatasetVersion
// unique path name: directoryLabel + file separator + fileLabel
Set pathNamesExisting = existingPathNamesAsSet(version);
-
+ if(fileToReplace!=null) {
+ pathNamesExisting.removeAll(existingPathNamesAsSet(version, fileToReplace.getFileMetadata()));
+ }
// Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name
for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) {
@@ -248,7 +251,7 @@ public static Set existingPathNamesAsSet(DatasetVersion version) {
return existingPathNamesAsSet(version, null);
}
- private static Set existingPathNamesAsSet(DatasetVersion version, FileMetadata fileMetadata) {
+ public static Set existingPathNamesAsSet(DatasetVersion version, FileMetadata fileMetadata) {
Set pathNamesExisting = new HashSet<>();
// create list of existing path names from all FileMetadata in the DatasetVersion
From 591c427d892c4560dcb7bfa6f9c34908cacba240 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 11:58:03 -0400
Subject: [PATCH 004/179] update to match
---
src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 7f1a632a86a..9b8af888b60 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -3619,7 +3619,7 @@ public String save() {
// have been created in the dataset.
dataset = datasetService.find(dataset.getId());
- List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getEditVersion(), newFiles, false);
+ List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getEditVersion(), newFiles, null);
newFiles.clear();
// and another update command:
From bea1ed24a9b09131fd967731c3409e3380dab4b0 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 12:33:28 -0400
Subject: [PATCH 005/179] update test and add new replace test
---
.../iq/dataverse/ingest/IngestUtilTest.java | 117 ++++++++++++++++--
1 file changed, 106 insertions(+), 11 deletions(-)
diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
index abc773971c2..054311ae5da 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
@@ -98,7 +98,7 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
dataFileList.add(datafile2);
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
boolean file1NameAltered = false;
boolean file2NameAltered = false;
@@ -111,12 +111,12 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
}
}
- // check filenames are unique and unaltered
+ // check filenames are unique and altered
assertEquals(file1NameAltered, true);
assertEquals(file2NameAltered, true);
// try to add data files with "-1" duplicates and see if it gets incremented to "-2"
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
for (DataFile df : dataFileList) {
if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
@@ -127,7 +127,7 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception {
}
}
- // check filenames are unique and unaltered
+ // check filenames are unique and altered
assertEquals(file1NameAltered, true);
assertEquals(file2NameAltered, true);
}
@@ -204,7 +204,7 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce
dataFileList.add(datafile2);
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
boolean file1NameAltered = false;
boolean file2NameAltered = false;
@@ -217,12 +217,12 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce
}
}
- // check filenames are unique and unaltered
+ // check filenames are unique and altered
assertEquals(file1NameAltered, true);
assertEquals(file2NameAltered, true);
// try to add data files with "-1" duplicates and see if it gets incremented to "-2"
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
for (DataFile df : dataFileList) {
if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
@@ -233,7 +233,7 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce
}
}
- // check filenames are unique and unaltered
+ // check filenames are unique and altered
assertEquals(file1NameAltered, true);
assertEquals(file2NameAltered, true);
}
@@ -329,7 +329,7 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
dataFileList.add(datafile3);
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
boolean file1NameAltered = false;
boolean file2NameAltered = false;
@@ -356,7 +356,7 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
fmd3.setDatasetVersion(datasetVersion);
// try to add data files with "-1" duplicates and see if it gets incremented to "-2"
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
for (DataFile df : dataFileList) {
if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
@@ -447,7 +447,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception {
dataFileList.add(datafile2);
- IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
boolean file2NameAltered = false;
for (DataFile df : dataFileList) {
@@ -460,6 +460,101 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception {
assertEquals(file2NameAltered, true);
}
+
+ @Test
+ /**
+ * Test adding duplicate file name labels to a dataset version with empty
+ * directory labels when replacing a file. This should simulate what happens when replacing a file
+ * via the file upload UI.
+ */
+ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception {
+
+ SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
+
+ // create dataset
+ Dataset dataset = makeDataset();
+
+ // create dataset version
+ DatasetVersion datasetVersion = dataset.getEditVersion();
+ datasetVersion.setCreateTime(dateFmt.parse("20001012"));
+ datasetVersion.setLastUpdateTime(datasetVersion.getLastUpdateTime());
+ datasetVersion.setId(MocksFactory.nextId());
+ datasetVersion.setReleaseTime(dateFmt.parse("20010101"));
+ datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
+ datasetVersion.setMinorVersionNumber(0L);
+ datasetVersion.setVersionNumber(1L);
+ datasetVersion.setFileMetadatas(new ArrayList<>());
+
+ // create datafiles
+ List dataFileList = new ArrayList<>();
+ DataFile datafile1 = new DataFile("application/octet-stream");
+ datafile1.setStorageIdentifier("datafile1.txt");
+ datafile1.setFilesize(200);
+ datafile1.setModificationTime(new Timestamp(new Date().getTime()));
+ datafile1.setCreateDate(new Timestamp(new Date().getTime()));
+ datafile1.setPermissionModificationTime(new Timestamp(new Date().getTime()));
+ datafile1.setOwner(dataset);
+ datafile1.setIngestDone();
+ datafile1.setChecksumType(DataFile.ChecksumType.SHA1);
+ datafile1.setChecksumValue("Unknown");
+
+ // set metadata and add version
+ FileMetadata fmd1 = new FileMetadata();
+ fmd1.setId(1L);
+ fmd1.setLabel("datafile1.txt");
+ fmd1.setDirectoryLabel("");
+ fmd1.setDataFile(datafile1);
+ datafile1.getFileMetadatas().add(fmd1);
+ datasetVersion.getFileMetadatas().add(fmd1);
+ fmd1.setDatasetVersion(datasetVersion);
+
+ dataFileList.add(datafile1);
+
+ DataFile datafile2 = new DataFile("application/octet-stream");
+ datafile2.setStorageIdentifier("datafile2.txt");
+ datafile2.setFilesize(200);
+ datafile2.setModificationTime(new Timestamp(new Date().getTime()));
+ datafile2.setCreateDate(new Timestamp(new Date().getTime()));
+ datafile2.setPermissionModificationTime(new Timestamp(new Date().getTime()));
+ datafile2.setOwner(dataset);
+ datafile2.setIngestDone();
+ datafile2.setChecksumType(DataFile.ChecksumType.SHA1);
+ datafile2.setChecksumValue("Unknown");
+
+ // set metadata and add version
+ FileMetadata fmd2 = new FileMetadata();
+ fmd2.setId(2L);
+ fmd2.setLabel("datafile2.txt");
+ fmd2.setDirectoryLabel("");
+ fmd2.setDataFile(datafile2);
+ datafile2.getFileMetadatas().add(fmd2);
+ datasetVersion.getFileMetadatas().add(fmd2);
+ fmd2.setDatasetVersion(datasetVersion);
+
+ dataFileList.add(datafile2);
+
+ /*In a real replace, there should only be one file in dataFileList. Having both files in dataFileList, we're essentially testing two cases at once:
+ * - the replacing file name conflicts with some other file's name
+ * - the replacing file's name only conflicts with the file being replaced (datafile2) and shouldn't be changed
+ */
+ IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, datafile2);
+
+ boolean file1NameAltered = false;
+ boolean file2NameAltered = false;
+ for (DataFile df : dataFileList) {
+ if (df.getFileMetadata().getLabel().equals("datafile1-1.txt")) {
+ file1NameAltered = true;
+ }
+ if (df.getFileMetadata().getLabel().equals("datafile2-1.txt")) {
+ file2NameAltered = true;
+ }
+ }
+
+ // check filenames are unique and unaltered
+ assertEquals(file1NameAltered, true);
+ assertEquals(file2NameAltered, false);
+ }
+
@Test
public void testDirectoryLabels() {
From d2cea2b394c9b6e5d266691d29f23d7aeeede083 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 12:46:43 -0400
Subject: [PATCH 006/179] invert logic - method already excludes the one
filemetadata
---
.../java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 5 +----
1 file changed, 1 insertion(+), 4 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index 422b5e803a1..d112b7edbc0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -63,10 +63,7 @@ public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List<
// Step 1: create list of existing path names from all FileMetadata in the DatasetVersion
// unique path name: directoryLabel + file separator + fileLabel
- Set pathNamesExisting = existingPathNamesAsSet(version);
- if(fileToReplace!=null) {
- pathNamesExisting.removeAll(existingPathNamesAsSet(version, fileToReplace.getFileMetadata()));
- }
+ Set pathNamesExisting = existingPathNamesAsSet(version, fileToReplace.getFileMetadata());
// Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name
for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) {
From 10fc3dc3a7c3b3a3b5301feb18f10aac5a9ab6cc Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 12:50:50 -0400
Subject: [PATCH 007/179] and null check
---
src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index d112b7edbc0..dcf3104da7c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -63,7 +63,7 @@ public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List<
// Step 1: create list of existing path names from all FileMetadata in the DatasetVersion
// unique path name: directoryLabel + file separator + fileLabel
- Set pathNamesExisting = existingPathNamesAsSet(version, fileToReplace.getFileMetadata());
+ Set pathNamesExisting = existingPathNamesAsSet(version, ((fileToReplace == null) ? null : fileToReplace.getFileMetadata()));
// Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name
for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) {
From 91748b3e395c78b176eab1fd9f66085cfaae36bb Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Mon, 19 Oct 2020 18:44:09 -0400
Subject: [PATCH 008/179] modified framework for the download/GET part of the
new aux. metadata API. will post more info tomorrow, about what may still
need to be done there. #7275
---
.../harvard/iq/dataverse/AuxiliaryFile.java | 20 ++++++
.../dataverse/AuxiliaryFileServiceBean.java | 21 +++++++
.../edu/harvard/iq/dataverse/api/Access.java | 61 +++++++++++++++----
.../iq/dataverse/api/DownloadInstance.java | 15 +++++
.../dataverse/api/DownloadInstanceWriter.java | 12 ++++
5 files changed, 116 insertions(+), 13 deletions(-)
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
index 1c6c5708fe5..55655e3974f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -37,6 +37,10 @@ public class AuxiliaryFile implements Serializable {
private String origin;
private boolean isPublic;
+
+ private String contentType;
+
+ private Long fileSize;
public Long getId() {
return id;
@@ -86,5 +90,21 @@ public void setIsPublic(boolean isPublic) {
this.isPublic = isPublic;
}
+ public String getContentType() {
+ // TODO: hard-coded for testing:
+ return "application/json";
+ //return contentType;
+ }
+
+ public void setContentType(String contentType) {
+ this.contentType = contentType;
+ }
+
+ public Long getFileSize() {
+ return fileSize;
+ }
+ public void setFileSize(long fileSize) {
+ this.fileSize = fileSize;
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
index 01b0ee6e865..2c09ff3f6a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -9,6 +9,7 @@
import javax.inject.Named;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
+import javax.persistence.Query;
/**
*
@@ -64,6 +65,9 @@ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi
auxFile.setOrigin(origin);
auxFile.setIsPublic(isPublic);
auxFile.setDataFile(dataFile);
+ // TODO: mime type!
+ //auxFile.setContentType(mimeType);
+ auxFile.setFileSize(storageIO.getAuxObjectSize(auxExtension));
save(auxFile);
} catch (IOException ioex) {
logger.info("IO Exception trying to save auxiliary file: " + ioex.getMessage());
@@ -79,5 +83,22 @@ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi
}
return true;
}
+
+ // Looks up an auxiliary file by its parent DataFile, the formatTag and version
+ // TODO: improve as needed.
+ public AuxiliaryFile lookupAuxiliaryFile(DataFile dataFile, String formatTag, String formatVersion) {
+
+ Query query = em.createQuery("select object(o) from AuxiliaryFile as o where o.dataFile.id = :dataFileId and o.formatTag = :formatTag and o.formatVersion = :formatVersion");
+
+ query.setParameter("dataFileId", dataFile.getId());
+ query.setParameter("formatTag", formatTag);
+ query.setParameter("formatVersion", formatVersion);
+ try {
+ AuxiliaryFile retVal = (AuxiliaryFile)query.getSingleResult();
+ return retVal;
+ } catch(Exception ex) {
+ return null;
+ }
+ }
}
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index f05dd02e0a4..3a5c38dec64 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -6,6 +6,7 @@
package edu.harvard.iq.dataverse.api;
+import edu.harvard.iq.dataverse.AuxiliaryFile;
import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean;
import edu.harvard.iq.dataverse.DataCitation;
import edu.harvard.iq.dataverse.DataFile;
@@ -499,16 +500,20 @@ public String dataVariableMetadataDDI(@PathParam("varId") Long varId, @QueryPara
}
/*
- * "Preprocessed data" metadata format:
- * (this was previously provided as a "format conversion" option of the
- * file download form of the access API call)
+ * GET method for retrieving various auxiliary files associated with
+ * a tabular datafile.
*/
- @Path("datafile/{fileId}/metadata/preprocessed")
+ @Path("datafile/{fileId}/metadata/{formatTag}/{formatVersion}")
@GET
- @Produces({"text/xml"})
- public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") String fileId, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws ServiceUnavailableException {
+ public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") String fileId,
+ @PathParam("formatTag") String formatTag,
+ @PathParam("formatVersion") String formatVersion,
+ @QueryParam("key") String apiToken,
+ @Context UriInfo uriInfo,
+ @Context HttpHeaders headers,
+ @Context HttpServletResponse response) throws ServiceUnavailableException {
DataFile df = findDataFileOrDieWrapper(fileId);
@@ -516,18 +521,48 @@ public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId")
apiToken = headers.getHeaderString(API_KEY_HEADER);
}
- // This will throw a ForbiddenException if access isn't authorized:
- checkAuthorization(df, apiToken);
DownloadInfo dInfo = new DownloadInfo(df);
+ boolean publiclyAvailable = false;
- if (df.isTabularData()) {
+ if (!df.isTabularData()) {
+ throw new BadRequestException("tabular data required");
+ }
+
+ DownloadInstance downloadInstance;
+ AuxiliaryFile auxFile = null;
+
+ // formatTag=preprocessed is handled as a special case.
+ // This is (as of now) the only aux. tabular metadata format that Dataverse
+ // can generate (and cache) itself. (All the other formats served have
+ // to be deposited first, by the @POST version of this API).
+
+ if ("preprocessed".equals(formatTag)) {
dInfo.addServiceAvailable(new OptionalAccessService("preprocessed", "application/json", "format=prep", "Preprocessed data in JSON"));
+ downloadInstance = new DownloadInstance(dInfo);
+ if (downloadInstance.checkIfServiceSupportedAndSetConverter("format", "prep")) {
+ logger.fine("Preprocessed data for tabular file "+fileId);
+ }
} else {
- throw new BadRequestException("tabular data required");
+ // All other (deposited) formats:
+ auxFile = auxiliaryFileService.lookupAuxiliaryFile(df, formatTag, formatVersion);
+
+ if (auxFile == null) {
+ throw new NotFoundException("Auxiliary metadata format "+formatTag+" is not available for datafile "+fileId);
+ }
+
+ if (auxFile.getIsPublic()) {
+ publiclyAvailable = true;
+ }
+ downloadInstance = new DownloadInstance(dInfo);
+ downloadInstance.setAuxiliaryFile(auxFile);
}
- DownloadInstance downloadInstance = new DownloadInstance(dInfo);
- if (downloadInstance.checkIfServiceSupportedAndSetConverter("format", "prep")) {
- logger.fine("Preprocessed data for tabular file "+fileId);
+
+ // Unless this format is explicitly authorized to be publicly available,
+ // the following will check access authorization (based on the access rules
+ // as defined for the DataFile itself), and will throw a ForbiddenException
+ // if access is denied:
+ if (!publiclyAvailable) {
+ checkAuthorization(df, apiToken);
}
return downloadInstance;
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
index 7e354bea24b..07215cb919e 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java
@@ -6,6 +6,7 @@
package edu.harvard.iq.dataverse.api;
//import java.io.ByteArrayOutputStream;
+import edu.harvard.iq.dataverse.AuxiliaryFile;
import edu.harvard.iq.dataverse.DataverseRequestServiceBean;
import edu.harvard.iq.dataverse.EjbDataverseEngine;
import edu.harvard.iq.dataverse.GuestbookResponse;
@@ -47,6 +48,12 @@ public void setExtraArguments(List
@@ -171,7 +171,7 @@
+ update="manageGroupsForm">