From adeeca56ca08da6898f7ff94deb1fe8022ffc006 Mon Sep 17 00:00:00 2001
From: Don Sizemore
Date: Tue, 5 May 2020 14:42:09 -0400
Subject: [PATCH 001/179] #6897 correct ticks and quotes as found by @madunlap

---
 doc/sphinx-guides/source/api/native-api.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/sphinx-guides/source/api/native-api.rst b/doc/sphinx-guides/source/api/native-api.rst
index 33d57ce5887..e2dde2184b0 100644
--- a/doc/sphinx-guides/source/api/native-api.rst
+++ b/doc/sphinx-guides/source/api/native-api.rst
@@ -1170,13 +1170,13 @@ In the curl example below, all of the above are specified but they are optional.
   export SERVER_URL=https://demo.dataverse.org
   export PERSISTENT_ID=doi:10.5072/FK2/J8SJZB

-  curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F 'jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"
+  curl -H X-Dataverse-key:$API_TOKEN -X POST -F "file=@$FILENAME" -F jsonData='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "$SERVER_URL/api/datasets/:persistentId/add?persistentId=$PERSISTENT_ID"

 The fully expanded example above (without environment variables) looks like this:

 .. code-block:: bash

-  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F jsonData={"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"} https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB
+  curl -H X-Dataverse-key:xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -X POST -F file=@data.tsv -F jsonData='{"description":"My description.","directoryLabel":"data/subdir1","categories":["Data"], "restrict":"false"}' "https://demo.dataverse.org/api/datasets/:persistentId/add?persistentId=doi:10.5072/FK2/J8SJZB"

 You should expect a 201 ("CREATED") response and JSON indicating the database id that has been assigned to your newly uploaded file.

From 5c2391beb65528675609d499882e41eb56709518 Mon Sep 17 00:00:00 2001
From: ellenk
Date: Tue, 13 Oct 2020 14:13:25 -0400
Subject: [PATCH 002/179] new API method for saving a generic auxiliary file
 to a data file

---
 .../harvard/iq/dataverse/AuxiliaryFile.java   | 90 +++++++++++++++++++
 .../dataverse/AuxiliaryFileServiceBean.java   | 83 +++++++++++++++++
 .../edu/harvard/iq/dataverse/DataFile.java    | 11 +++
 .../edu/harvard/iq/dataverse/api/Access.java  | 83 +++++++++++++----
 .../harvard/iq/dataverse/api/AccessIT.java    | 24 +++--
 5 files changed, 269 insertions(+), 22 deletions(-)
 create mode 100644 src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
 create mode 100644 src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java

diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
new file mode 100644
index 00000000000..1c6c5708fe5
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -0,0 +1,90 @@
+
+package edu.harvard.iq.dataverse;
+
+import java.io.Serializable;
+import javax.persistence.Entity;
+import javax.persistence.GeneratedValue;
+import javax.persistence.GenerationType;
+import javax.persistence.Id;
+import javax.persistence.JoinColumn;
+import javax.persistence.ManyToOne;
+
+/**
+ *
+ * @author ekraffmiller
+ * Represents a generic file that is associated with a dataFile.
+ * This is a data representation of a physical file in StorageIO
+ */
+@Entity
+public class AuxiliaryFile implements Serializable {
+
+    @Id
+    @GeneratedValue(strategy = GenerationType.IDENTITY)
+    private Long id;
+
+    /**
+     * The data file that this AuxiliaryFile belongs to;
+     * a data file may have many auxiliaryFiles
+     */
+    @ManyToOne
+    @JoinColumn(nullable=false)
+    private DataFile dataFile;
+
+    private String formatTag;
+
+    private String formatVersion;
+
+    private String origin;
+
+    private boolean isPublic;
+
+    public Long getId() {
+        return id;
+    }
+
+    public void setId(Long id) {
+        this.id = id;
+    }
+
+    public DataFile getDataFile() {
+        return dataFile;
+    }
+
+    public void setDataFile(DataFile dataFile) {
+        this.dataFile = dataFile;
+    }
+
+    public String getFormatTag() {
+        return formatTag;
+    }
+
+    public void setFormatTag(String formatTag) {
+        this.formatTag = formatTag;
+    }
+
+    public String getFormatVersion() {
+        return formatVersion;
+    }
+
+    public void setFormatVersion(String formatVersion) {
+        this.formatVersion = formatVersion;
+    }
+
+    public String getOrigin() {
+        return origin;
+    }
+
+    public void setOrigin(String origin) {
+        this.origin = origin;
+    }
+
+    public boolean getIsPublic() {
+        return isPublic;
+    }
+
+    public void setIsPublic(boolean isPublic) {
+        this.isPublic = isPublic;
+    }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
new file mode 100644
index 00000000000..01b0ee6e865
--- /dev/null
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -0,0 +1,83 @@
+
+package edu.harvard.iq.dataverse;
+
+import edu.harvard.iq.dataverse.dataaccess.StorageIO;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.logging.Logger;
+import javax.ejb.Stateless;
+import javax.inject.Named;
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+
+/**
+ *
+ * @author ekraffmiller
+ * Methods related to the AuxiliaryFile Entity.
+ */
+@Stateless
+@Named
+public class AuxiliaryFileServiceBean implements java.io.Serializable {
+    private static final Logger logger = Logger.getLogger(AuxiliaryFileServiceBean.class.getCanonicalName());
+
+    @PersistenceContext(unitName = "VDCNet-ejbPU")
+    private EntityManager em;
+
+    public AuxiliaryFile find(Object pk) {
+        return em.find(AuxiliaryFile.class, pk);
+    }
+
+    public AuxiliaryFile save(AuxiliaryFile auxiliaryFile) {
+        AuxiliaryFile savedFile = em.merge(auxiliaryFile);
+        return savedFile;
+    }
+
+    /**
+     * Save the physical file to storageIO, and save the AuxiliaryFile entity
+     * to the database. This should be an all or nothing transaction - if either
+     * process fails, then nothing will be saved
+     * @param fileInputStream - auxiliary file data to be saved
+     * @param dataFile - the dataFile entity this will be added to
+     * @param formatTag - type of file being saved
+     * @param formatVersion - to distinguish between multiple versions of a file
+     * @param origin - name of the tool/system that created the file
+     * @param isPublic boolean - is this file available to any user?
+     * @return success boolean - returns whether the save was successful
+     */
+    public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFile, String formatTag, String formatVersion, String origin, boolean isPublic) {
+
+        StorageIO<DataFile> storageIO = null;
+
+        String auxExtension = formatTag + "_" + formatVersion;
+        try {
+            // Save to storage first.
+            // If that is successful (does not throw exception),
+            // then save to db.
+            // If the db fails for any reason, then rollback
+            // by removing the auxfile from storage.
+            storageIO = dataFile.getStorageIO();
+            storageIO.saveInputStreamAsAux(fileInputStream, auxExtension);
+            AuxiliaryFile auxFile = new AuxiliaryFile();
+            auxFile.setFormatTag(formatTag);
+            auxFile.setFormatVersion(formatVersion);
+            auxFile.setOrigin(origin);
+            auxFile.setIsPublic(isPublic);
+            auxFile.setDataFile(dataFile);
+            save(auxFile);
+        } catch (IOException ioex) {
+            logger.info("IO Exception trying to save auxiliary file: " + ioex.getMessage());
+            return false;
+        } catch (Exception e) {
+            // If anything fails during database insert, remove file from storage
+            try {
+                storageIO.deleteAuxObject(auxExtension);
+            } catch (IOException ioex) {
+                logger.info("IO Exception trying to remove auxiliary file in exception handler: " + ioex.getMessage());
+            }
+            return false;
+        }
+        return true;
+    }
+
+}
diff --git a/src/main/java/edu/harvard/iq/dataverse/DataFile.java b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
index 560048db9ca..2f0981c80af 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DataFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DataFile.java
@@ -192,6 +192,9 @@ public String toString() {
     @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
     private List<DataTable> dataTables;
 
+    @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
+    private List<AuxiliaryFile> auxiliaryFiles;
+
     @OneToMany(mappedBy = "dataFile", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
     private List<IngestReport> ingestReports;
 
@@ -281,6 +284,14 @@ public String getDuplicateFilename() {
     public void setDuplicateFilename(String duplicateFilename) {
         this.duplicateFilename = duplicateFilename;
     }
+
+    public List<AuxiliaryFile> getAuxiliaryFiles() {
+        return auxiliaryFiles;
+    }
+
+    public void setAuxiliaryFiles(List<AuxiliaryFile> auxiliaryFiles) {
+        this.auxiliaryFiles = auxiliaryFiles;
+    }
 
diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
index 8f913ea5f1b..f05dd02e0a4 100644
--- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java
+++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java
@@ -6,6 +6,7 @@
 package edu.harvard.iq.dataverse.api;
 
+import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean;
 import edu.harvard.iq.dataverse.DataCitation;
 import edu.harvard.iq.dataverse.DataFile;
 import edu.harvard.iq.dataverse.FileMetadata;
@@ -43,14 +44,12 @@
 import edu.harvard.iq.dataverse.dataaccess.DataFileZipper;
 import edu.harvard.iq.dataverse.dataaccess.OptionalAccessService;
 import edu.harvard.iq.dataverse.dataaccess.ImageThumbConverter;
-import edu.harvard.iq.dataverse.dataaccess.StoredOriginalFile;
 import edu.harvard.iq.dataverse.datavariable.DataVariable;
 import edu.harvard.iq.dataverse.datavariable.VariableServiceBean;
 import edu.harvard.iq.dataverse.engine.command.Command;
 import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
 import edu.harvard.iq.dataverse.engine.command.exception.CommandException;
 import edu.harvard.iq.dataverse.engine.command.impl.AssignRoleCommand;
-import edu.harvard.iq.dataverse.engine.command.impl.CreateExplicitGroupCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.GetDatasetCommand;
 import edu.harvard.iq.dataverse.engine.command.impl.GetDraftDatasetVersionCommand;
 import
edu.harvard.iq.dataverse.engine.command.impl.GetLatestAccessibleDatasetVersionCommand; @@ -62,13 +61,11 @@ import edu.harvard.iq.dataverse.export.DDIExportServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean; import edu.harvard.iq.dataverse.makedatacount.MakeDataCountLoggingServiceBean.MakeDataCountEntry; -import edu.harvard.iq.dataverse.makedatacount.MakeDataCountUtil; import edu.harvard.iq.dataverse.settings.SettingsServiceBean; import edu.harvard.iq.dataverse.util.BundleUtil; import edu.harvard.iq.dataverse.util.FileUtil; import edu.harvard.iq.dataverse.util.StringUtil; import edu.harvard.iq.dataverse.util.SystemConfig; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import edu.harvard.iq.dataverse.worldmapauth.WorldMapTokenServiceBean; import java.util.logging.Logger; @@ -88,16 +85,9 @@ import java.util.logging.Level; import javax.inject.Inject; import javax.json.Json; -import javax.json.JsonObjectBuilder; -import java.math.BigDecimal; import java.net.URI; -import java.util.HashSet; -import java.util.Set; -import java.util.function.Consumer; -import javax.faces.context.FacesContext; import javax.json.JsonArrayBuilder; import javax.persistence.TypedQuery; -import javax.servlet.http.HttpServletRequest; import javax.ws.rs.GET; import javax.ws.rs.Path; @@ -110,7 +100,6 @@ import javax.servlet.http.HttpServletResponse; -import javax.servlet.http.HttpSession; import javax.ws.rs.BadRequestException; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; @@ -125,10 +114,13 @@ import static javax.ws.rs.core.Response.Status.BAD_REQUEST; import javax.ws.rs.core.StreamingOutput; import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; -import static edu.harvard.iq.dataverse.util.json.JsonPrinter.json; import java.net.URISyntaxException; import javax.ws.rs.RedirectionException; +import javax.ws.rs.core.MediaType; +import static javax.ws.rs.core.Response.Status.FORBIDDEN; +import org.glassfish.jersey.media.multipart.FormDataBodyPart; +import org.glassfish.jersey.media.multipart.FormDataContentDisposition; +import org.glassfish.jersey.media.multipart.FormDataParam; /* Custom API exceptions [NOT YET IMPLEMENTED] @@ -184,6 +176,8 @@ public class Access extends AbstractApiBean { UserNotificationServiceBean userNotificationService; @EJB FileDownloadServiceBean fileDownloadService; + @EJB + AuxiliaryFileServiceBean auxiliaryFileService; @Inject PermissionsWrapper permissionsWrapper; @Inject @@ -1084,6 +1078,65 @@ private String getWebappImageResource(String imageName) { } */ + /** + * + * @param fileId + * @param formatTag + * @param formatVersion + * @param origin + * @param isPublic + * @param fileInputStream + * @param contentDispositionHeader + * @param formDataBodyPart + * @return + */ + @Path("datafile/{fileId}/metadata/{formatTag}/{formatVersion}") + @POST + @Consumes(MediaType.MULTIPART_FORM_DATA) + + public Response saveAuxiliaryFileWithVersion(@PathParam("fileId") Long fileId, + @PathParam("formatTag") String formatTag, + @PathParam("formatVersion") String formatVersion, + @FormDataParam("origin") String origin, + @FormDataParam("isPublic") boolean isPublic, + @FormDataParam("file") InputStream fileInputStream, + @FormDataParam("file") FormDataContentDisposition contentDispositionHeader, + @FormDataParam("file") final FormDataBodyPart formDataBodyPart + ) { + AuthenticatedUser authenticatedUser; + try { + authenticatedUser = findAuthenticatedUserOrDie(); + } 
catch (WrappedResponse ex) { + return error(FORBIDDEN, "Authorized users only."); + } + + DataFile dataFile = dataFileService.find(fileId); + if (dataFile == null) { + return error(BAD_REQUEST, "File not found based on id " + fileId + "."); + } + + if (!permissionService.userOn(authenticatedUser, dataFile.getOwner()).has(Permission.EditDataset)) { + return error(FORBIDDEN, "User not authorized to edit the dataset."); + } + + if (!dataFile.isTabularData()) { + return error(BAD_REQUEST, "Not a tabular DataFile (db id=" + fileId + ")"); + } + + + boolean saved = auxiliaryFileService.processAuxiliaryFile(fileInputStream, dataFile, formatTag, formatVersion, origin, isPublic); + + if (saved) { + return ok("Auxiliary file has been saved."); + } else { + return error(BAD_REQUEST, "Error saving Auxiliary file."); + } + } + + + + + /** * Allow (or disallow) access requests to Dataset * @@ -1835,5 +1888,5 @@ private URI handleCustomZipDownload(String customZipServiceUrl, String fileIds, throw new BadRequestException(); } return redirectUri; - } + } } diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index ad0c93a80d1..ba646aa3592 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -6,20 +6,23 @@ package edu.harvard.iq.dataverse.api; import com.jayway.restassured.RestAssured; +import static com.jayway.restassured.RestAssured.given; import com.jayway.restassured.path.json.JsonPath; import com.jayway.restassured.response.Response; import edu.harvard.iq.dataverse.DataFile; +import static edu.harvard.iq.dataverse.api.UtilIT.API_TOKEN_HTTP_HEADER; import edu.harvard.iq.dataverse.util.FileUtil; import java.io.IOException; import java.util.zip.ZipInputStream; -import static javax.ws.rs.core.Response.Status.OK; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import java.util.zip.ZipEntry; import java.io.ByteArrayOutputStream; +import java.io.File; import java.io.InputStream; import java.util.HashMap; +import static javax.ws.rs.core.Response.Status.OK; import org.hamcrest.collection.IsMapContaining; import static junit.framework.Assert.assertEquals; import static org.junit.Assert.assertNotNull; @@ -27,12 +30,6 @@ import static org.junit.Assert.assertTrue; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; -import static junit.framework.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; -import static org.hamcrest.CoreMatchers.is; -import static org.hamcrest.CoreMatchers.not; /** * @@ -156,6 +153,7 @@ public static void setUp() throws InterruptedException { String tab4PathToFile = "scripts/search/data/tabular/" + tabFile4NameUnpublished; Response tab4AddResponse = UtilIT.uploadFileViaNative(datasetId.toString(), tab4PathToFile, apiToken); tabFile4IdUnpublished = JsonPath.from(tab4AddResponse.body().asString()).getInt("data.files[0].dataFile.id"); + assertTrue("Failed test if Ingest Lock exceeds max duration " + tabFile2Name, UtilIT.sleepForLock(datasetId.longValue(), "Ingest", apiToken, UtilIT.MAXIMUM_INGEST_LOCK_DURATION)); } @@ -172,6 +170,18 @@ public static void tearDown() { } + @Test + public void testSaveAuxiliaryFileWithVersion() { + System.out.println("Add aux file with update"); + String mimeType = null; + String pathToFile = "scripts/search/data/tabular/1char"; + 
Response response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .multiPart("file", new File(pathToFile), mimeType) + .post("/api/access/datafile/" + tabFile1Id + "/metadata/dpJSON/v1"); + response.prettyPrint(); + assertEquals(200, response.getStatusCode()); + } //This test does a lot of testing of non-original downloads as well @Test From 15a782a4804cf97a99bd90499f3fb6eee1ebe14a Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 16 Oct 2020 11:51:08 -0400 Subject: [PATCH 003/179] name check but ignore fileToReplace name --- .../java/edu/harvard/iq/dataverse/EditDatafilesPage.java | 2 +- .../api/datadeposit/MediaResourceManagerImpl.java | 2 +- .../dataverse/datasetutility/AddReplaceFileHelper.java | 2 +- .../harvard/iq/dataverse/ingest/IngestServiceBean.java | 7 ++----- .../java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 9 ++++++--- 5 files changed, 11 insertions(+), 11 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java index b4feecfcdf4..06960f45c9b 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java @@ -1128,7 +1128,7 @@ public String save() { } // Try to save the NEW files permanently: - List filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, false); + List filesAdded = ingestService.saveAndAddFilesToDataset(workingVersion, newFiles, null); // reset the working list of fileMetadatas, as to only include the ones // that have been added to the version successfully: diff --git a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java index 23730885aab..84095e936ab 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/datadeposit/MediaResourceManagerImpl.java @@ -335,7 +335,7 @@ DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials au throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + violation.getMessage() + " The invalid value was \"" + violation.getInvalidValue() + "\"."); } else { - ingestService.saveAndAddFilesToDataset(editVersion, dataFiles, false); + ingestService.saveAndAddFilesToDataset(editVersion, dataFiles, null); } } else { diff --git a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java index ab34b5b2675..ea1cfc38cfa 100644 --- a/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java +++ b/src/main/java/edu/harvard/iq/dataverse/datasetutility/AddReplaceFileHelper.java @@ -1501,7 +1501,7 @@ private boolean step_060_addFilesViaIngestService(){ } int nFiles = finalFileList.size(); - finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, isFileReplaceOperation()); + finalFileList = ingestService.saveAndAddFilesToDataset(workingVersion, finalFileList, fileToReplace); if (nFiles != finalFileList.size()) { if (nFiles == 1) { diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java index f5eeaa1c316..ff114646d38 100644 --- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java +++ 
b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestServiceBean.java @@ -155,7 +155,7 @@ public class IngestServiceBean { // DataFileCategory objects, if any were already assigned to the files). // It must be called before we attempt to permanently save the files in // the database by calling the Save command on the dataset and/or version. - public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, boolean isReplaceOperation) { + public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles, DataFile fileToReplace) { List ret = new ArrayList<>(); if (newFiles != null && newFiles.size() > 0) { @@ -164,10 +164,7 @@ public List saveAndAddFilesToDataset(DatasetVersion version, List newFiles) { + public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List newFiles, DataFile fileToReplace) { // Step 1: create list of existing path names from all FileMetadata in the DatasetVersion // unique path name: directoryLabel + file separator + fileLabel Set pathNamesExisting = existingPathNamesAsSet(version); - + if(fileToReplace!=null) { + pathNamesExisting.removeAll(existingPathNamesAsSet(version, fileToReplace.getFileMetadata())); + } // Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name for (Iterator dfIt = newFiles.iterator(); dfIt.hasNext();) { @@ -248,7 +251,7 @@ public static Set existingPathNamesAsSet(DatasetVersion version) { return existingPathNamesAsSet(version, null); } - private static Set existingPathNamesAsSet(DatasetVersion version, FileMetadata fileMetadata) { + public static Set existingPathNamesAsSet(DatasetVersion version, FileMetadata fileMetadata) { Set pathNamesExisting = new HashSet<>(); // create list of existing path names from all FileMetadata in the DatasetVersion From 591c427d892c4560dcb7bfa6f9c34908cacba240 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 16 Oct 2020 11:58:03 -0400 Subject: [PATCH 004/179] update to match --- src/main/java/edu/harvard/iq/dataverse/DatasetPage.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java index 7f1a632a86a..9b8af888b60 100644 --- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java +++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java @@ -3619,7 +3619,7 @@ public String save() { // have been created in the dataset. 
dataset = datasetService.find(dataset.getId()); - List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getEditVersion(), newFiles, false); + List filesAdded = ingestService.saveAndAddFilesToDataset(dataset.getEditVersion(), newFiles, null); newFiles.clear(); // and another update command: From bea1ed24a9b09131fd967731c3409e3380dab4b0 Mon Sep 17 00:00:00 2001 From: Jim Myers Date: Fri, 16 Oct 2020 12:33:28 -0400 Subject: [PATCH 005/179] update test and add new replace test --- .../iq/dataverse/ingest/IngestUtilTest.java | 117 ++++++++++++++++-- 1 file changed, 106 insertions(+), 11 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java index abc773971c2..054311ae5da 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java @@ -98,7 +98,7 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { dataFileList.add(datafile2); - IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList); + IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); boolean file1NameAltered = false; boolean file2NameAltered = false; @@ -111,12 +111,12 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } } - // check filenames are unique and unaltered + // check filenames are unique and altered assertEquals(file1NameAltered, true); assertEquals(file2NameAltered, true); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" - IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList); + IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); for (DataFile df : dataFileList) { if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) { @@ -127,7 +127,7 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } } - // check filenames are unique and unaltered + // check filenames are unique and altered assertEquals(file1NameAltered, true); assertEquals(file2NameAltered, true); } @@ -204,7 +204,7 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce dataFileList.add(datafile2); - IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList); + IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); boolean file1NameAltered = false; boolean file2NameAltered = false; @@ -217,12 +217,12 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } } - // check filenames are unique and unaltered + // check filenames are unique and altered assertEquals(file1NameAltered, true); assertEquals(file2NameAltered, true); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" - IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList); + IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); for (DataFile df : dataFileList) { if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) { @@ -233,7 +233,7 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } } - // check filenames are unique and unaltered + // check filenames are unique and altered assertEquals(file1NameAltered, true); assertEquals(file2NameAltered, true); } @@ -329,7 +329,7 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { dataFileList.add(datafile3); - 
IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
 
         boolean file1NameAltered = false;
         boolean file2NameAltered = false;
@@ -356,7 +356,7 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception {
         fmd3.setDatasetVersion(datasetVersion);
 
         // try to add data files with "-1" duplicates and see if it gets incremented to "-2"
-        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
 
         for (DataFile df : dataFileList) {
             if (df.getFileMetadata().getLabel().equals("datafile1-2.txt")) {
@@ -447,7 +447,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception {
 
         dataFileList.add(datafile2);
 
-        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList);
+        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null);
 
         boolean file2NameAltered = false;
         for (DataFile df : dataFileList) {
@@ -460,6 +460,101 @@
 
         assertEquals(file2NameAltered, true);
     }
 
+    @Test
+    /**
+     * Test adding duplicate file name labels to a dataset version with empty
+     * directory labels when replacing a file. This should simulate what happens
+     * when replacing a file via the file upload UI.
+     */
+    public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception {
+
+        SimpleDateFormat dateFmt = new SimpleDateFormat("yyyyMMdd");
+
+        // create dataset
+        Dataset dataset = makeDataset();
+
+        // create dataset version
+        DatasetVersion datasetVersion = dataset.getEditVersion();
+        datasetVersion.setCreateTime(dateFmt.parse("20001012"));
+        datasetVersion.setLastUpdateTime(datasetVersion.getLastUpdateTime());
+        datasetVersion.setId(MocksFactory.nextId());
+        datasetVersion.setReleaseTime(dateFmt.parse("20010101"));
+        datasetVersion.setVersionState(DatasetVersion.VersionState.RELEASED);
+        datasetVersion.setMinorVersionNumber(0L);
+        datasetVersion.setVersionNumber(1L);
+        datasetVersion.setFileMetadatas(new ArrayList<>());
+
+        // create datafiles
+        List<DataFile> dataFileList = new ArrayList<>();
+        DataFile datafile1 = new DataFile("application/octet-stream");
+        datafile1.setStorageIdentifier("datafile1.txt");
+        datafile1.setFilesize(200);
+        datafile1.setModificationTime(new Timestamp(new Date().getTime()));
+        datafile1.setCreateDate(new Timestamp(new Date().getTime()));
+        datafile1.setPermissionModificationTime(new Timestamp(new Date().getTime()));
+        datafile1.setOwner(dataset);
+        datafile1.setIngestDone();
+        datafile1.setChecksumType(DataFile.ChecksumType.SHA1);
+        datafile1.setChecksumValue("Unknown");
+
+        // set metadata and add version
+        FileMetadata fmd1 = new FileMetadata();
+        fmd1.setId(1L);
+        fmd1.setLabel("datafile1.txt");
+        fmd1.setDirectoryLabel("");
+        fmd1.setDataFile(datafile1);
+        datafile1.getFileMetadatas().add(fmd1);
+        datasetVersion.getFileMetadatas().add(fmd1);
+        fmd1.setDatasetVersion(datasetVersion);
+
+        dataFileList.add(datafile1);
+
+        DataFile datafile2 = new DataFile("application/octet-stream");
+        datafile2.setStorageIdentifier("datafile2.txt");
+        datafile2.setFilesize(200);
+        datafile2.setModificationTime(new Timestamp(new Date().getTime()));
+        datafile2.setCreateDate(new Timestamp(new Date().getTime()));
+        datafile2.setPermissionModificationTime(new Timestamp(new Date().getTime()));
+        datafile2.setOwner(dataset);
+        datafile2.setIngestDone();
+        datafile2.setChecksumType(DataFile.ChecksumType.SHA1);
+        datafile2.setChecksumValue("Unknown");
+
+        // set metadata and add version
+        FileMetadata fmd2 = new FileMetadata();
+        fmd2.setId(2L);
+        fmd2.setLabel("datafile2.txt");
+        fmd2.setDirectoryLabel("");
+        fmd2.setDataFile(datafile2);
+        datafile2.getFileMetadatas().add(fmd2);
+        datasetVersion.getFileMetadatas().add(fmd2);
+        fmd2.setDatasetVersion(datasetVersion);
+
+        dataFileList.add(datafile2);
+
+        /* In a real replace, there should only be one file in dataFileList. Having both files in dataFileList, we're essentially testing two cases at once:
+         * - the replacing file name conflicts with some other file's name
+         * - the replacing file's name only conflicts with the file being replaced (datafile2) and shouldn't be changed
+         */
+        IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, datafile2);
+
+        boolean file1NameAltered = false;
+        boolean file2NameAltered = false;
+        for (DataFile df : dataFileList) {
+            if (df.getFileMetadata().getLabel().equals("datafile1-1.txt")) {
+                file1NameAltered = true;
+            }
+            if (df.getFileMetadata().getLabel().equals("datafile2-1.txt")) {
+                file2NameAltered = true;
+            }
+        }
+
+        // check that the name conflicting with datafile1 was altered, while the
+        // name of datafile2 (the file being replaced) was left unaltered
+        assertEquals(file1NameAltered, true);
+        assertEquals(file2NameAltered, false);
+    }
+
     @Test
     public void testDirectoryLabels() {

From d2cea2b394c9b6e5d266691d29f23d7aeeede083 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 12:46:43 -0400
Subject: [PATCH 006/179] invert logic - method already excludes the one
 filemetadata

---
 .../java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index 422b5e803a1..d112b7edbc0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -63,10 +63,7 @@ public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List<
 
         // Step 1: create list of existing path names from all FileMetadata in the DatasetVersion
         //         unique path name: directoryLabel + file separator + fileLabel
-        Set<String> pathNamesExisting = existingPathNamesAsSet(version);
-        if(fileToReplace!=null) {
-            pathNamesExisting.removeAll(existingPathNamesAsSet(version, fileToReplace.getFileMetadata()));
-        }
+        Set<String> pathNamesExisting = existingPathNamesAsSet(version, fileToReplace.getFileMetadata());
 
         // Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name

From 10fc3dc3a7c3b3a3b5301feb18f10aac5a9ab6cc Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 16 Oct 2020 12:50:50 -0400
Subject: [PATCH 007/179] and null check

---
 src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index d112b7edbc0..dcf3104da7c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -63,7 +63,7 @@ public static void checkForDuplicateFileNamesFinal(DatasetVersion version, List<
 
         // Step 1: create list of existing path names from all FileMetadata in the DatasetVersion
         //         unique path name: directoryLabel + file separator + fileLabel
-        Set<String> pathNamesExisting = existingPathNamesAsSet(version, fileToReplace.getFileMetadata());
+        Set<String> pathNamesExisting = existingPathNamesAsSet(version, ((fileToReplace == null) ? null : fileToReplace.getFileMetadata()));
 
         // Step 2: check each new DataFile against the list of path names, if a duplicate create a new unique file name

From 91748b3e395c78b176eab1fd9f66085cfaae36bb Mon Sep 17 00:00:00 2001
From: Leonid Andreev
Date: Mon, 19 Oct 2020 18:44:09 -0400
Subject: [PATCH 008/179] modified framework for the download/GET part of the
 new aux. metadata API. will post more info tomorrow, about what may still
 need to be done there. #7275

---
 .../harvard/iq/dataverse/AuxiliaryFile.java   | 20 ++++++
 .../dataverse/AuxiliaryFileServiceBean.java   | 21 +++++++
 .../edu/harvard/iq/dataverse/api/Access.java  | 61 +++++++++++++----
 .../iq/dataverse/api/DownloadInstance.java    | 15 +++++
 .../dataverse/api/DownloadInstanceWriter.java | 12 ++++
 5 files changed, 116 insertions(+), 13 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
index 1c6c5708fe5..55655e3974f 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java
@@ -37,6 +37,10 @@ public class AuxiliaryFile implements Serializable {
     private String origin;
 
     private boolean isPublic;
+
+    private String contentType;
+
+    private Long fileSize;
 
     public Long getId() {
         return id;
@@ -86,5 +90,21 @@ public void setIsPublic(boolean isPublic) {
         this.isPublic = isPublic;
     }
 
+    public String getContentType() {
+        // TODO: hard-coded for testing:
+        return "application/json";
+        //return contentType;
+    }
+
+    public void setContentType(String contentType) {
+        this.contentType = contentType;
+    }
+
+    public Long getFileSize() {
+        return fileSize;
+    }
+    public void setFileSize(long fileSize) {
+        this.fileSize = fileSize;
+    }
 }
diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
index 01b0ee6e865..2c09ff3f6a1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -9,6 +9,7 @@
 import javax.inject.Named;
 import javax.persistence.EntityManager;
 import javax.persistence.PersistenceContext;
+import javax.persistence.Query;
 
 /**
  *
@@ -64,6 +65,9 @@ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi
             auxFile.setOrigin(origin);
             auxFile.setIsPublic(isPublic);
             auxFile.setDataFile(dataFile);
+            // TODO: mime type!
+            //auxFile.setContentType(mimeType);
+            auxFile.setFileSize(storageIO.getAuxObjectSize(auxExtension));
             save(auxFile);
         } catch (IOException ioex) {
             logger.info("IO Exception trying to save auxiliary file: " + ioex.getMessage());
             return false;
@@ -79,5 +83,22 @@ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi
         }
         return true;
     }
+
+    // Looks up an auxiliary file by its parent DataFile, the formatTag and version
+    // TODO: improve as needed.
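+    // The lookup below returns null (rather than throwing) when there is no match;
+    // the caller in Access.java turns that null into a 404 Not Found. One way to
+    // "improve as needed" per the TODO above (a suggestion, not part of this patch):
+    // em.createQuery(jpql, AuxiliaryFile.class) returns a TypedQuery<AuxiliaryFile>,
+    // which would avoid the unchecked cast and the broad catch(Exception) below.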
+ public AuxiliaryFile lookupAuxiliaryFile(DataFile dataFile, String formatTag, String formatVersion) { + + Query query = em.createQuery("select object(o) from AuxiliaryFile as o where o.dataFile.id = :dataFileId and o.formatTag = :formatTag and o.formatVersion = :formatVersion"); + + query.setParameter("dataFileId", dataFile.getId()); + query.setParameter("formatTag", formatTag); + query.setParameter("formatVersion", formatVersion); + try { + AuxiliaryFile retVal = (AuxiliaryFile)query.getSingleResult(); + return retVal; + } catch(Exception ex) { + return null; + } + } } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/Access.java b/src/main/java/edu/harvard/iq/dataverse/api/Access.java index f05dd02e0a4..3a5c38dec64 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/Access.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/Access.java @@ -6,6 +6,7 @@ package edu.harvard.iq.dataverse.api; +import edu.harvard.iq.dataverse.AuxiliaryFile; import edu.harvard.iq.dataverse.AuxiliaryFileServiceBean; import edu.harvard.iq.dataverse.DataCitation; import edu.harvard.iq.dataverse.DataFile; @@ -499,16 +500,20 @@ public String dataVariableMetadataDDI(@PathParam("varId") Long varId, @QueryPara } /* - * "Preprocessed data" metadata format: - * (this was previously provided as a "format conversion" option of the - * file download form of the access API call) + * GET method for retrieving various auxiliary files associated with + * a tabular datafile. */ - @Path("datafile/{fileId}/metadata/preprocessed") + @Path("datafile/{fileId}/metadata/{formatTag}/{formatVersion}") @GET - @Produces({"text/xml"}) - public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") String fileId, @QueryParam("key") String apiToken, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) throws ServiceUnavailableException { + public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") String fileId, + @PathParam("formatTag") String formatTag, + @PathParam("formatVersion") String formatVersion, + @QueryParam("key") String apiToken, + @Context UriInfo uriInfo, + @Context HttpHeaders headers, + @Context HttpServletResponse response) throws ServiceUnavailableException { DataFile df = findDataFileOrDieWrapper(fileId); @@ -516,18 +521,48 @@ public DownloadInstance tabularDatafileMetadataPreprocessed(@PathParam("fileId") apiToken = headers.getHeaderString(API_KEY_HEADER); } - // This will throw a ForbiddenException if access isn't authorized: - checkAuthorization(df, apiToken); DownloadInfo dInfo = new DownloadInfo(df); + boolean publiclyAvailable = false; - if (df.isTabularData()) { + if (!df.isTabularData()) { + throw new BadRequestException("tabular data required"); + } + + DownloadInstance downloadInstance; + AuxiliaryFile auxFile = null; + + // formatTag=preprocessed is handled as a special case. + // This is (as of now) the only aux. tabular metadata format that Dataverse + // can generate (and cache) itself. (All the other formats served have + // to be deposited first, by the @POST version of this API). 
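+        // A usage sketch rather than fixed API documentation: a previously deposited
+        // format can be fetched back with something like the following (dpJSON/v1
+        // mirrors the tag/version used in the AccessIT test, not a reserved name):
+        //
+        //   curl -H "X-Dataverse-key:$API_TOKEN" \
+        //     "$SERVER_URL/api/access/datafile/$FILE_ID/metadata/dpJSON/v1"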
+ + if ("preprocessed".equals(formatTag)) { dInfo.addServiceAvailable(new OptionalAccessService("preprocessed", "application/json", "format=prep", "Preprocessed data in JSON")); + downloadInstance = new DownloadInstance(dInfo); + if (downloadInstance.checkIfServiceSupportedAndSetConverter("format", "prep")) { + logger.fine("Preprocessed data for tabular file "+fileId); + } } else { - throw new BadRequestException("tabular data required"); + // All other (deposited) formats: + auxFile = auxiliaryFileService.lookupAuxiliaryFile(df, formatTag, formatVersion); + + if (auxFile == null) { + throw new NotFoundException("Auxiliary metadata format "+formatTag+" is not available for datafile "+fileId); + } + + if (auxFile.getIsPublic()) { + publiclyAvailable = true; + } + downloadInstance = new DownloadInstance(dInfo); + downloadInstance.setAuxiliaryFile(auxFile); } - DownloadInstance downloadInstance = new DownloadInstance(dInfo); - if (downloadInstance.checkIfServiceSupportedAndSetConverter("format", "prep")) { - logger.fine("Preprocessed data for tabular file "+fileId); + + // Unless this format is explicitly authorized to be publicly available, + // the following will check access authorization (based on the access rules + // as defined for the DataFile itself), and will throw a ForbiddenException + // if access is denied: + if (!publiclyAvailable) { + checkAuthorization(df, apiToken); } return downloadInstance; diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java index 7e354bea24b..07215cb919e 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstance.java @@ -6,6 +6,7 @@ package edu.harvard.iq.dataverse.api; //import java.io.ByteArrayOutputStream; +import edu.harvard.iq.dataverse.AuxiliaryFile; import edu.harvard.iq.dataverse.DataverseRequestServiceBean; import edu.harvard.iq.dataverse.EjbDataverseEngine; import edu.harvard.iq.dataverse.GuestbookResponse; @@ -47,6 +48,12 @@ public void setExtraArguments(List extraArguments) { private String conversionParam = null; private String conversionParamValue = null; + // This download instance is for an auxiliary file associated with + // the DataFile. Unlike "conversions" (above) this is used for files + // that Dataverse has no way of producing/deriving from the parent Datafile + // itself, that have to be deposited externally. + private AuxiliaryFile auxiliaryFile = null; + private EjbDataverseEngine command; private DataverseRequestServiceBean dataverseRequestService; @@ -210,4 +217,12 @@ public void setDataverseRequestService(DataverseRequestServiceBean dataverseRequ this.dataverseRequestService = dataverseRequestService; } + public AuxiliaryFile getAuxiliaryFile() { + return auxiliaryFile; + } + + public void setAuxiliaryFile(AuxiliaryFile auxiliaryFile) { + this.auxiliaryFile = auxiliaryFile; + } + } diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index b10412a577d..1a8cca15595 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -227,6 +227,18 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] // (similarly to what the Access API returns when a thumbnail is requested on a text file, etc.) 
throw new NotFoundException("datafile access error: requested optional service (image scaling, format conversion, etc.) could not be performed on this datafile."); } + } else if (di.getAuxiliaryFile() != null) { + String auxTag = di.getAuxiliaryFile().getFormatTag(); + String auxVersion = di.getAuxiliaryFile().getFormatVersion(); + if (auxVersion != null) { + auxTag = auxTag + "_" + auxVersion; + } + long auxFileSize = di.getAuxiliaryFile().getFileSize(); + InputStreamIO auxStreamIO = new InputStreamIO(storageIO.getAuxFileAsInputStream(auxTag), auxFileSize); + auxStreamIO.setFileName(storageIO.getFileName() + "." + auxTag); + auxStreamIO.setMimeType(di.getAuxiliaryFile().getContentType()); + storageIO = auxStreamIO; + } else { if (storageIO instanceof S3AccessIO && !(dataFile.isTabularData()) && ((S3AccessIO) storageIO).downloadRedirectEnabled()) { // definitely close the (still open) S3 input stream, From d6ce1d7e5066a6ad2c68f5f6aac6974fa799c1e9 Mon Sep 17 00:00:00 2001 From: ellenk Date: Thu, 22 Oct 2020 14:56:28 -0400 Subject: [PATCH 009/179] added checksum and contentType to AuxiliaryFile entity --- .../harvard/iq/dataverse/AuxiliaryFile.java | 16 +++++++++++++--- .../iq/dataverse/AuxiliaryFileServiceBean.java | 18 ++++++++++++++---- .../edu/harvard/iq/dataverse/api/AccessIT.java | 11 ++++++++++- 3 files changed, 37 insertions(+), 8 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java index 55655e3974f..957a7cc93bf 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java +++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFile.java @@ -41,6 +41,8 @@ public class AuxiliaryFile implements Serializable { private String contentType; private Long fileSize; + + private String checksum; public Long getId() { return id; @@ -91,9 +93,7 @@ public void setIsPublic(boolean isPublic) { } public String getContentType() { - // TODO: hard-coded for testing: - return "application/json"; - //return contentType; + return this.contentType; } public void setContentType(String contentType) { @@ -107,4 +107,14 @@ public Long getFileSize() { public void setFileSize(long fileSize) { this.fileSize = fileSize; } + + public String getChecksum() { + return checksum; + } + + public void setChecksum(String checksum) { + this.checksum = checksum; + } + + } diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java index 2c09ff3f6a1..9fe6181ff92 100644 --- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java +++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java @@ -2,14 +2,18 @@ package edu.harvard.iq.dataverse; import edu.harvard.iq.dataverse.dataaccess.StorageIO; +import edu.harvard.iq.dataverse.util.FileUtil; +import edu.harvard.iq.dataverse.util.SystemConfig; import java.io.IOException; import java.io.InputStream; import java.util.logging.Logger; +import javax.ejb.EJB; import javax.ejb.Stateless; import javax.inject.Named; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.Query; +import org.apache.tika.Tika; /** * @@ -23,6 +27,10 @@ public class AuxiliaryFileServiceBean implements java.io.Serializable { @PersistenceContext(unitName = "VDCNet-ejbPU") private EntityManager em; + + @EJB + private SystemConfig systemConfig; + public AuxiliaryFile find(Object pk) { return em.find(AuxiliaryFile.class, pk); @@ -58,15 +66,17 @@ 
public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi // If the db fails for any reason, then rollback // by removing the auxfile from storage. storageIO = dataFile.getStorageIO(); - storageIO.saveInputStreamAsAux(fileInputStream, auxExtension); AuxiliaryFile auxFile = new AuxiliaryFile(); + storageIO.saveInputStreamAsAux(fileInputStream, auxExtension); + auxFile.setChecksum(FileUtil.calculateChecksum(storageIO.getAuxFileAsInputStream(auxExtension), systemConfig.getFileFixityChecksumAlgorithm())); + + Tika tika = new Tika(); + auxFile.setContentType(tika.detect(storageIO.getAuxFileAsInputStream(auxExtension))); auxFile.setFormatTag(formatTag); auxFile.setFormatVersion(formatVersion); auxFile.setOrigin(origin); auxFile.setIsPublic(isPublic); - auxFile.setDataFile(dataFile); - // TODO: mime type! - //auxFile.setContentType(mimeType); + auxFile.setDataFile(dataFile); auxFile.setFileSize(storageIO.getAuxObjectSize(auxExtension)); save(auxFile); } catch (IOException ioex) { diff --git a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java index ba646aa3592..4fb1271c8c9 100644 --- a/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java +++ b/src/test/java/edu/harvard/iq/dataverse/api/AccessIT.java @@ -21,6 +21,7 @@ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.InputStream; +import java.nio.file.Path; import java.util.HashMap; import static javax.ws.rs.core.Response.Status.OK; import org.hamcrest.collection.IsMapContaining; @@ -171,7 +172,7 @@ public static void tearDown() { @Test - public void testSaveAuxiliaryFileWithVersion() { + public void testSaveAuxiliaryFileWithVersion() throws IOException { System.out.println("Add aux file with update"); String mimeType = null; String pathToFile = "scripts/search/data/tabular/1char"; @@ -181,6 +182,14 @@ public void testSaveAuxiliaryFileWithVersion() { .post("/api/access/datafile/" + tabFile1Id + "/metadata/dpJSON/v1"); response.prettyPrint(); assertEquals(200, response.getStatusCode()); + System.out.println("Downloading Aux file that was just added"); + response = given() + .header(API_TOKEN_HTTP_HEADER, apiToken) + .get("/api/access/datafile/" + tabFile1Id + "/metadata/dpJSON/v1"); + + String dataStr = response.prettyPrint(); + assertEquals(dataStr,"a\n"); + assertEquals(200, response.getStatusCode()); } //This test does a lot of testing of non-original downloads as well From 1a122d4e93bf5e55307445774d2a1c64e9e2828e Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 23 Oct 2020 18:44:05 +0200 Subject: [PATCH 010/179] Remove reference_data.sql usages. 
#7256 --- scripts/installer/Makefile | 9 +---- scripts/installer/README.txt | 4 -- scripts/installer/install | 71 +----------------------------------- scripts/installer/install.py | 27 -------------- 4 files changed, 3 insertions(+), 108 deletions(-) diff --git a/scripts/installer/Makefile b/scripts/installer/Makefile index 7e002e8fa61..5155edfde1f 100644 --- a/scripts/installer/Makefile +++ b/scripts/installer/Makefile @@ -3,7 +3,6 @@ DISTRIBUTION_WAR_FILE=${INSTALLER_ZIP_DIR}/dataverse.war GLASSFISH_SETUP_SCRIPT=${INSTALLER_ZIP_DIR}/as-setup.sh POSTGRES_DRIVERS=${INSTALLER_ZIP_DIR}/pgdriver API_SCRIPTS=${INSTALLER_ZIP_DIR}/setup-datasetfields.sh ${INSTALLER_ZIP_DIR}/setup-users.sh ${INSTALLER_ZIP_DIR}/setup-builtin-roles.sh ${INSTALLER_ZIP_DIR}/setup-dvs.sh ${INSTALLER_ZIP_DIR}/data ${INSTALLER_ZIP_DIR}/setup-identity-providers.sh ${INSTALLER_ZIP_DIR}/setup-all.sh ${INSTALLER_ZIP_DIR}/post-install-api-block.sh -DB_SCRIPT=${INSTALLER_ZIP_DIR}/reference_data.sql JHOVE_CONFIG=${INSTALLER_ZIP_DIR}/jhove.conf JHOVE_SCHEMA=${INSTALLER_ZIP_DIR}/jhoveConfig.xsd SOLR_SCHEMA=${INSTALLER_ZIP_DIR}/schema.xml ${INSTALLER_ZIP_DIR}/schema_dv_mdb_fields.xml ${INSTALLER_ZIP_DIR}/schema_dv_mdb_copies.xml ${INSTALLER_ZIP_DIR}/updateSchemaMDB.sh @@ -16,9 +15,9 @@ installer: dvinstall.zip clean: /bin/rm -rf ${INSTALLER_ZIP_DIR} dvinstall.zip -dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} +dvinstall.zip: ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} @echo making installer... - zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${DB_SCRIPT} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} + zip -r dvinstall.zip ${GLASSFISH_SETUP_SCRIPT} ${POSTGRES_DRIVERS} ${JSF_PATCH_DIR} ${DISTRIBUTION_WAR_FILE} ${API_SCRIPTS} ${JHOVE_CONFIG} ${JHOVE_SCHEMA} ${SOLR_SCHEMA} ${SOLR_CONFIG} ${PYTHON_FILES} ${INSTALL_SCRIPT} @echo @echo "Done!" 
@@ -55,10 +54,6 @@ ${API_SCRIPTS}: ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup @echo copying api scripts /bin/cp -R ../api/setup-datasetfields.sh ../api/setup-users.sh ../api/setup-dvs.sh ../api/setup-identity-providers.sh ../api/setup-all.sh ../api/post-install-api-block.sh ../api/setup-builtin-roles.sh ../api/data ${INSTALLER_ZIP_DIR} -${DB_SCRIPT}: ../database/reference_data.sql ${INSTALLER_ZIP_DIR} - @echo copying reference data sql script - /bin/cp ../database/reference_data.sql ${INSTALLER_ZIP_DIR} - ${JHOVE_CONFIG}: ../../conf/jhove/jhove.conf ${INSTALLER_ZIP_DIR} @echo copying jhove config file /bin/cp ../../conf/jhove/jhove.conf ${INSTALLER_ZIP_DIR} diff --git a/scripts/installer/README.txt b/scripts/installer/README.txt index 81328b89ccd..ec11881c720 100644 --- a/scripts/installer/README.txt +++ b/scripts/installer/README.txt @@ -32,10 +32,6 @@ setup-identity-providers.sh setup-users.sh data (the entire directory with all its contents) -from scripts/database: - -reference_data.sql - from conf/jhove: jhove.conf diff --git a/scripts/installer/install b/scripts/installer/install index c804e1903e4..ffe98d48720 100755 --- a/scripts/installer/install +++ b/scripts/installer/install @@ -46,8 +46,7 @@ if (exists($ENV{'MY_POD_NAME'})) my $jq_exec_path = ""; my $psql_exec_path = ""; my $cwd; -my $WARFILE_LOCATION = "dataverse.war"; -my $REFERENCE_DATA_SQL = "../database/reference_data.sql"; +my $WARFILE_LOCATION = "dataverse.war"; my @CONFIG_VARIABLES; @@ -317,23 +316,6 @@ unless ($postgresonly) # 1b. check and remember the working dir: chomp( $cwd = `pwd` ); -# 1c. check if the reference data SQL file is there: - - $REFERENCE_DATA_SQL = "../database/reference_data.sql"; - - unless ( -f $REFERENCE_DATA_SQL ) { - # if it's not in ../database, then we're probably running out of the - # unzipped installer bundle, so it should be right here in the current directory: - $REFERENCE_DATA_SQL = "reference_data.sql"; - } - - unless ( -f $REFERENCE_DATA_SQL ) { - print "\nWARNING: Can't find .sql data template!\n"; - print "(are you running the installer in the right directory?)\n"; - - exit 0; - } - # 1d. jq executable: my $sys_path = $ENV{'PATH'}; @@ -563,15 +545,6 @@ my $glassfish_dir = $CONFIG_DEFAULTS{'GLASSFISH_DIRECTORY'}; my $done = &setup_appserver(); -# 7. POPULATE THE DATABASE WITH REFERENCE DATA -# TODO: move this further down -# (in this step some pre-supplied content is inserted into the database that we have just created; -# it is not *necessary* for the application to run in the very basic mode; but some features - certain -# types of metadata imports, for example - will be unavailable if it's not done. - -&import_reference_data(); - - # Check if the App is running: unless (( @@ -1513,48 +1486,6 @@ sub setup_postgres { } } -sub import_reference_data { - print "\npopulating the database with reference data:\n\n"; - - # (we have already verified that the referenceData.sql file exists) - - my $psql_command = $psql_exec_path . "/psql -h " . $CONFIG_DEFAULTS{'POSTGRES_SERVER'} - . " -p " . $CONFIG_DEFAULTS{'POSTGRES_PORT'} - . " -U " . $CONFIG_DEFAULTS{'POSTGRES_USER'} - . " -d " . $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} . " -f " . $REFERENCE_DATA_SQL; - - print "EXECUTING PSQL COMMAND: $psql_command\n"; - unless ( ( my $exitcode = system("$psql_command") ) == 0 ) - { - print "WARNING: Could not pre-populate Postgres database for the Dataverse application!\n"; - print "(command: " . $psql_command . ")\n"; - print "(psql exit code: " . $exitcode . 
")\n"; - print "\nYou must populate the database in order for all the features of your \n"; - print "new Dataverse to be available. \n"; - print "\n"; - print "You can try this again, by executing the following on the command line:\n"; - print " psql -U $CONFIG_DEFAULTS{'POSTGRES_USER'} -d $CONFIG_DEFAULTS{'POSTGRES_DATABASE'} -f $REFERENCE_DATA_SQL\n"; - print "then re-start payara \n\n"; - print "\n"; - print "If it's still failing, please consult the installation manual and/or\n"; - print "seek support from the Dataverse team.\n\n"; - - print "Press any key to continue... "; - - unless ($noninteractive) - { - system "stty cbreak /dev/tty 2>&1"; - my $key = getc(STDIN); - system "stty -cbreak /dev/tty 2>&1"; - print "\n"; - } - } - else - { - print "\nOK, done!\n"; - } -} - sub read_config_defaults { my $config_file = shift @_; diff --git a/scripts/installer/install.py b/scripts/installer/install.py index a00250a44ce..40f91cea640 100644 --- a/scripts/installer/install.py +++ b/scripts/installer/install.py @@ -245,17 +245,6 @@ sys.exit("Sorry, I can't seem to find an appropriate warfile.\nAre you running the installer from the right directory?") print(warfile+" available to deploy. Good.") - # 1b. check for reference_data.sql - referenceData = '../database/reference_data.sql' - if not os.path.isfile(referenceData): - # if it's not there, then we're probably running out of the - # unzipped installer bundle, so it should be right here in the current directory: - referenceData = 'reference_data.sql' - if not os.path.isfile(referenceData): - sys.exit("Can't find reference_data.sql!\nAre you running the installer from the right directory?") - - print("found "+referenceData+"... good") - # 1c. check if jq is available # (but we're only doing it if it's not that weird "pod name" mode) if podName != "start-glassfish": @@ -558,22 +547,6 @@ # @todo: restart/try to deploy again if it failed? # @todo: if asadmin deploy says it was successful, verify that the application is running... if not - repeat the above? -# 6. Import reference data -print("importing reference data...") -# open the new postgresQL connection (as the application user): -conn_string="dbname='"+pgDb+"' user='"+pgUser+"' password='"+pgPassword+"' host='"+pgHost+"'" -conn = psycopg2.connect(conn_string) -conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) -cur = conn.cursor() -try: - cur.execute(open(referenceData, "r").read()) - print("done.") -except: - print("WARNING: failed to import reference data!") - -cur.close() -conn.close() - # 7. RUN SETUP SCRIPTS AND CONFIGURE EXTRA SETTINGS # (note that we may need to change directories, depending on whether this is a dev., or release installer) # 7a. run setup scripts From fc3dbb690778f1b3c3dc70bb0e34abec22b3a12b Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 23 Oct 2020 20:27:04 +0200 Subject: [PATCH 011/179] Move index creation from reference_data.sql into Flyway baseline. 
From fc3dbb690778f1b3c3dc70bb0e34abec22b3a12b Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Fri, 23 Oct 2020 20:27:04 +0200
Subject: [PATCH 011/179] Move index creation from reference_data.sql into
 Flyway baseline. #7256

---
 scripts/database/reference_data.sql             | 13 -------------
 .../db/migration/V1__flyway_schema_baseline.sql | 11 +++++++++++
 2 files changed, 11 insertions(+), 13 deletions(-)

diff --git a/scripts/database/reference_data.sql b/scripts/database/reference_data.sql
index 15ddedd4301..1bc6bcd4bab 100644
--- a/scripts/database/reference_data.sql
+++ b/scripts/database/reference_data.sql
@@ -26,19 +26,6 @@ INSERT INTO guestbook(
 VALUES ( false, true, false, now(),
         'Default', false, false, null);
 
--- TODO: Remove if http://stackoverflow.com/questions/25743191/how-to-add-a-case-insensitive-jpa-unique-constraint
--- gets an answer. See also https://github.com/IQSS/dataverse/issues/2598#issuecomment-158219334
-CREATE UNIQUE INDEX dataverse_alias_unique_idx on dataverse (LOWER(alias));
-CREATE UNIQUE INDEX index_authenticateduser_lower_email ON authenticateduser (lower(email));
--- CREATE UNIQUE INDEX index_authenticateduser_lower_useridentifier ON authenticateduser (lower(useridentifier));
--- this field has been removed from builtinuser; CREATE UNIQUE INDEX index_builtinuser_lower_email ON builtinuser (lower(email));
-
---Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132
---This unique index will prevent the multiple draft issue
-CREATE UNIQUE INDEX one_draft_version_per_dataset ON datasetversion
-(dataset_id) WHERE versionstate='DRAFT';
-
-
 INSERT INTO worldmapauth_tokentype
 ( name,
   created,
diff --git a/src/main/resources/db/migration/V1__flyway_schema_baseline.sql b/src/main/resources/db/migration/V1__flyway_schema_baseline.sql
index e69de29bb2d..2ec219cd19e 100644
--- a/src/main/resources/db/migration/V1__flyway_schema_baseline.sql
+++ b/src/main/resources/db/migration/V1__flyway_schema_baseline.sql
@@ -0,0 +1,11 @@
+-- TODO: we still should add the real base line here, too. That would avoid conflicts between EclipseLink
+-- trying to create new tables on existing databases. See https://github.com/IQSS/dataverse/issues/5871
+
+-- This is unsupported by JPA, as it is PostgreSQL specific. Has to be done here, cannot be done in code.
+-- (Only other option would be a lowercase copy of the data as a separate column, automatically filled by JPA)
+CREATE UNIQUE INDEX IF NOT EXISTS dataverse_alias_unique_idx on dataverse (LOWER(alias));
+CREATE UNIQUE INDEX IF NOT EXISTS index_authenticateduser_lower_email ON authenticateduser (lower(email));
+
+-- Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132
+-- This unique index will prevent the multiple draft issue, yet it cannot be done in JPA code.
+CREATE UNIQUE INDEX IF NOT EXISTS one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT';
\ No newline at end of file

From 4611825c2f2a01088f9fb56991e085ff7cbf3831 Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Fri, 23 Oct 2020 21:01:17 +0200
Subject: [PATCH 012/179] Replace initial data insert from reference_data.sql
 with Flyway afterMigrate callback SQL script.
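The baseline just added relies on two PostgreSQL features that JPA cannot express: expression indexes and partial indexes. Both are easy to sanity-check from psql on a scratch table -- everything below is hypothetical demo data, not part of the migration:

    # expression index: uniqueness over LOWER(alias); the second INSERT
    # fails with a duplicate-key error even though the case differs
    psql -d dvndb <<'EOF'
    CREATE TABLE demo_dataverse (id serial PRIMARY KEY, alias text);
    CREATE UNIQUE INDEX demo_alias_unique_idx ON demo_dataverse (LOWER(alias));
    INSERT INTO demo_dataverse (alias) VALUES ('Research');
    INSERT INTO demo_dataverse (alias) VALUES ('RESEARCH');
    EOF

The partial index (... WHERE versionstate='DRAFT') works the same way, except that only rows matching the WHERE clause participate: a dataset may have any number of non-draft versions, yet at most one DRAFT.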
#7256 --- scripts/database/reference_data.sql | 39 ----------------- .../afterMigrate__1-upsert-referenceData.sql | 42 +++++++++++++++++++ 2 files changed, 42 insertions(+), 39 deletions(-) delete mode 100644 scripts/database/reference_data.sql create mode 100644 src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql diff --git a/scripts/database/reference_data.sql b/scripts/database/reference_data.sql deleted file mode 100644 index 1bc6bcd4bab..00000000000 --- a/scripts/database/reference_data.sql +++ /dev/null @@ -1,39 +0,0 @@ --- using http://dublincore.org/schemas/xmls/qdc/dcterms.xsd because at http://dublincore.org/schemas/xmls/ it's the schema location for http://purl.org/dc/terms/ which is referenced in http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html -INSERT INTO foreignmetadataformatmapping(id, name, startelement, displayName, schemalocation) VALUES (1, 'http://purl.org/dc/terms/', 'entry', 'dcterms: DCMI Metadata Terms', 'http://dublincore.org/schemas/xmls/qdc/dcterms.xsd'); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (1, ':title', 'title', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (2, ':identifier', 'otherIdValue', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (3, ':creator', 'authorName', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (4, ':date', 'productionDate', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (5, ':subject', 'keywordValue', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (6, ':description', 'dsDescriptionValue', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (7, ':relation', 'relatedMaterial', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (8, ':isReferencedBy', 'publicationCitation', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (9, 'holdingsURI', 'publicationURL', TRUE, 8, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (10, 'agency', 'publicationIDType', TRUE, 8, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (11, 'IDNo', 'publicationIDNumber', TRUE, 8, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (12, ':coverage', 'otherGeographicCoverage', 
FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (13, ':type', 'kindOfData', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (14, ':source', 'dataSources', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (15, 'affiliation', 'authorAffiliation', TRUE, 3, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (16, ':contributor', 'contributorName', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (17, 'type', 'contributorType', TRUE, 16, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (18, ':publisher', 'producerName', FALSE, NULL, 1 ); -INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) VALUES (19, ':language', 'language', FALSE, NULL, 1 ); - -INSERT INTO guestbook( - emailrequired, enabled, institutionrequired, createtime, - "name", namerequired, positionrequired, dataverse_id) - VALUES ( false, true, false, now(), - 'Default', false, false, null); - -INSERT INTO worldmapauth_tokentype -( name, - created, - contactemail, hostname, ipaddress, - mapitlink, md5, - modified, timelimitminutes) - VALUES ( 'GEOCONNECT', current_timestamp, - 'support@dataverse.org', 'geoconnect.datascience.iq.harvard.edu', '140.247.115.127', - 'http://geoconnect.datascience.iq.harvard.edu/shapefile/map-it', - '38c0a931b2d582a5c43fc79405b30c22', - current_timestamp, 30); diff --git a/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql b/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql new file mode 100644 index 00000000000..60140b44485 --- /dev/null +++ b/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql @@ -0,0 +1,42 @@ +-- using http://dublincore.org/schemas/xmls/qdc/dcterms.xsd because at http://dublincore.org/schemas/xmls/ it's the +-- schema location for http://purl.org/dc/terms/ which is referenced in http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html +INSERT INTO foreignmetadataformatmapping (id, name, startelement, displayName, schemalocation) + VALUES + (1, 'http://purl.org/dc/terms/', 'entry', 'dcterms: DCMI Metadata Terms', 'http://dublincore.org/schemas/xmls/qdc/dcterms.xsd') + ON CONFLICT DO NOTHING; + +INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname, isattribute, parentfieldmapping_id, foreignmetadataformatmapping_id) + VALUES + (1, ':title', 'title', FALSE, NULL, 1 ), + (2, ':identifier', 'otherIdValue', FALSE, NULL, 1 ), + (3, ':creator', 'authorName', FALSE, NULL, 1 ), + (4, ':date', 'productionDate', FALSE, NULL, 1 ), + (5, ':subject', 'keywordValue', FALSE, NULL, 1 ), + (6, ':description', 'dsDescriptionValue', FALSE, NULL, 1 ), + (7, ':relation', 'relatedMaterial', FALSE, NULL, 1 ), + (8, ':isReferencedBy', 'publicationCitation', FALSE, NULL, 1 ), + (9, 
'holdingsURI', 'publicationURL', TRUE, 8, 1 ), + (10, 'agency', 'publicationIDType', TRUE, 8, 1 ), + (11, 'IDNo', 'publicationIDNumber', TRUE, 8, 1 ), + (12, ':coverage', 'otherGeographicCoverage', FALSE, NULL, 1 ), + (13, ':type', 'kindOfData', FALSE, NULL, 1 ), + (14, ':source', 'dataSources', FALSE, NULL, 1 ), + (15, 'affiliation', 'authorAffiliation', TRUE, 3, 1 ), + (16, ':contributor', 'contributorName', FALSE, NULL, 1 ), + (17, 'type', 'contributorType', TRUE, 16, 1 ), + (18, ':publisher', 'producerName', FALSE, NULL, 1 ), + (19, ':language', 'language', FALSE, NULL, 1 ) + ON CONFLICT DO NOTHING; + +INSERT INTO guestbook (emailrequired, enabled, institutionrequired, createtime, name, namerequired, positionrequired, dataverse_id) + SELECT false, true, false, now(), 'Default', false, false, null + WHERE NOT EXISTS (SELECT id FROM guestbook); + +INSERT INTO worldmapauth_tokentype + (name, created, contactemail, hostname, + ipaddress, mapitlink, + md5, modified, timelimitminutes) + SELECT 'GEOCONNECT', current_timestamp, 'support@dataverse.org', 'geoconnect.datascience.iq.harvard.edu', + '140.247.115.127', 'http://geoconnect.datascience.iq.harvard.edu/shapefile/map-it', + '38c0a931b2d582a5c43fc79405b30c22', current_timestamp, 30 + WHERE NOT EXISTS (SELECT id from worldmapauth_tokentype); From 1e473163032c742eab609346223455abcdadc13f Mon Sep 17 00:00:00 2001 From: Leonid Andreev Date: Mon, 26 Oct 2020 10:38:42 -0400 Subject: [PATCH 013/179] Making sure there's no open inputstreams left. (#7275) --- .../edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java index 1a8cca15595..1624e9932e2 100644 --- a/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java +++ b/src/main/java/edu/harvard/iq/dataverse/api/DownloadInstanceWriter.java @@ -228,6 +228,8 @@ public void writeTo(DownloadInstance di, Class clazz, Type type, Annotation[] throw new NotFoundException("datafile access error: requested optional service (image scaling, format conversion, etc.) 
could not be performed on this datafile.");
                 }
             } else if (di.getAuxiliaryFile() != null) {
+                // Make sure to close the InputStream for the main datafile:
+                try {storageIO.getInputStream().close();} catch (IOException ioex) {}
                 String auxTag = di.getAuxiliaryFile().getFormatTag();
                 String auxVersion = di.getAuxiliaryFile().getFormatVersion();
                 if (auxVersion != null) {

From 63cdf16e3be0e2aaa2a20139cdc08df1c085cd8a Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Tue, 27 Oct 2020 11:04:55 -0400
Subject: [PATCH 014/179] some fmds have null id, so compare files

---
 src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
index dcf3104da7c..13d4ed96815 100644
--- a/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/ingest/IngestUtil.java
@@ -248,7 +248,7 @@ public static Set<String> existingPathNamesAsSet(DatasetVersion version) {
         return existingPathNamesAsSet(version, null);
     }
 
-    public static Set<String> existingPathNamesAsSet(DatasetVersion version, FileMetadata fileMetadata) {
+    public static Set<String> existingPathNamesAsSet(DatasetVersion version, FileMetadata replacedFmd) {
         Set<String> pathNamesExisting = new HashSet<>();
 
         // create list of existing path names from all FileMetadata in the DatasetVersion
@@ -257,7 +257,7 @@ public static Set<String> existingPathNamesAsSet(DatasetVersion version, FileMet
         // #6942 added proxy for existing files to a boolean set when dataset version copy is done
         for (Iterator<FileMetadata> fmIt = version.getFileMetadatas().iterator(); fmIt.hasNext();) {
             FileMetadata fm = fmIt.next();
-            if ((fm.isInPriorVersion() || fm.getId() != null) && (fileMetadata == null || !fm.getId().equals(fileMetadata.getId()))) {
+            if ((fm.isInPriorVersion() || fm.getId() != null) && (replacedFmd == null || !fm.getDataFile().equals(replacedFmd.getDataFile()))) {
                 String existingName = fm.getLabel();
                 String existingDir = fm.getDirectoryLabel();

From 51e0435c0e20a0912de5c967880dda310f60f448 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 30 Oct 2020 14:49:08 -0400
Subject: [PATCH 015/179] update test to match new duplicate test logic

---
 .../java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
index 054311ae5da..c94ea14219f 100644
--- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
+++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java
@@ -497,6 +497,7 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception {
         datafile1.setIngestDone();
         datafile1.setChecksumType(DataFile.ChecksumType.SHA1);
         datafile1.setChecksumValue("Unknown");
+        datafile1.setId(1L);
 
         // set metadata and add verson
         FileMetadata fmd1 = new FileMetadata();
@@ -520,6 +521,7 @@
         datafile2.setIngestDone();
         datafile2.setChecksumType(DataFile.ChecksumType.SHA1);
         datafile2.setChecksumValue("Unknown");
+        datafile2.setId(2L);
 
         // set metadata and add version
         FileMetadata fmd2 = new FileMetadata();

From f49a62f953ab28b6a097671f889863d1b4372b04 Mon Sep 17 00:00:00 2001
From: Jim Myers
Date: Fri, 30 Oct 2020 14:59:25 -0400
Subject: [PATCH 016/179] fix argument order affects error messages

---
.../iq/dataverse/ingest/IngestUtilTest.java | 34 +++++++++---------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java index c94ea14219f..8e4b81ec921 100644 --- a/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java +++ b/src/test/java/edu/harvard/iq/dataverse/ingest/IngestUtilTest.java @@ -112,8 +112,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -128,8 +128,8 @@ public void testCheckForDuplicateFileNamesNoDirectories() throws Exception { } // check filenames are unique and altered - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); } @Test @@ -218,8 +218,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); // try to add data files with "-1" duplicates and see if it gets incremented to "-2" IngestUtil.checkForDuplicateFileNamesFinal(datasetVersion, dataFileList, null); @@ -234,8 +234,8 @@ public void testCheckForDuplicateFileNamesWithEmptyDirectoryLabels() throws Exce } // check filenames are unique and altered - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); } @Test @@ -347,9 +347,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); - assertEquals(file3NameAltered, false); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); + assertEquals(false, file3NameAltered); // add duplicate file in root datasetVersion.getFileMetadatas().add(fmd3); @@ -371,9 +371,9 @@ public void testCheckForDuplicateFileNamesWithDirectories() throws Exception { } // check filenames are unique - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, true); - assertEquals(file3NameAltered, true); + assertEquals(true, file1NameAltered); + assertEquals(true, file2NameAltered); + assertEquals(true, file3NameAltered); } @Test @@ -457,7 +457,7 @@ public void testCheckForDuplicateFileNamesTabular() throws Exception { } // check filename is altered since tabular and will change to .tab after ingest - assertEquals(file2NameAltered, true); + assertEquals(true, file2NameAltered); } @@ -553,8 +553,8 @@ public void testCheckForDuplicateFileNamesWhenReplacing() throws Exception { } // check filenames are unique and unaltered - assertEquals(file1NameAltered, true); - assertEquals(file2NameAltered, false); + assertEquals(true, file1NameAltered); + assertEquals(false, file2NameAltered); } @Test From 1af5a24dfb45f968e4f9e6e84d0f6f37121b17fa Mon Sep 17 00:00:00 2001 From: ellenk Date: Mon, 2 Nov 2020 16:18:30 -0500 Subject: [PATCH 017/179] Use 
DigestInputStream so that checksum can be calculated when stream is being written to storage

---
 .../harvard/iq/dataverse/AuxiliaryFileServiceBean.java | 10 ++++++++--
 .../java/edu/harvard/iq/dataverse/util/FileUtil.java   |  2 +-
 2 files changed, 9 insertions(+), 3 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
index 9fe6181ff92..1ee3c5e7794 100644
--- a/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
+++ b/src/main/java/edu/harvard/iq/dataverse/AuxiliaryFileServiceBean.java
@@ -6,6 +6,8 @@
 import edu.harvard.iq.dataverse.util.SystemConfig;
 import java.io.IOException;
 import java.io.InputStream;
+import java.security.DigestInputStream;
+import java.security.MessageDigest;
 import java.util.logging.Logger;
 import javax.ejb.EJB;
 import javax.ejb.Stateless;
@@ -67,8 +69,12 @@ public boolean processAuxiliaryFile(InputStream fileInputStream, DataFile dataFi
             // by removing the auxfile from storage.
             storageIO = dataFile.getStorageIO();
             AuxiliaryFile auxFile = new AuxiliaryFile();
-            storageIO.saveInputStreamAsAux(fileInputStream, auxExtension);
-            auxFile.setChecksum(FileUtil.calculateChecksum(storageIO.getAuxFileAsInputStream(auxExtension), systemConfig.getFileFixityChecksumAlgorithm()));
+            MessageDigest md = MessageDigest.getInstance(systemConfig.getFileFixityChecksumAlgorithm().toString());
+            // wrap the incoming stream so the digest is updated as the bytes are read
+            DigestInputStream di = new DigestInputStream(fileInputStream, md);
+
+            storageIO.saveInputStreamAsAux(di, auxExtension); // save the digest wrapper, not the raw stream, or the digest stays empty
+            auxFile.setChecksum(FileUtil.checksumDigestToString(di.getMessageDigest().digest()));
             Tika tika = new Tika();
             auxFile.setContentType(tika.detect(storageIO.getAuxFileAsInputStream(auxExtension)));
diff --git a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
index 7ed9970fe13..cb243b1b8b1 100644
--- a/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
+++ b/src/main/java/edu/harvard/iq/dataverse/util/FileUtil.java
@@ -686,7 +686,7 @@ public static String calculateChecksum(byte[] dataBytes, ChecksumType checksumTy
 
     }
 
-    private static String checksumDigestToString(byte[] digestBytes) {
+    public static String checksumDigestToString(byte[] digestBytes) {
         StringBuilder sb = new StringBuilder("");
         for (int i = 0; i < digestBytes.length; i++) {
             sb.append(Integer.toString((digestBytes[i] & 0xff) + 0x100, 16).substring(1));

From 141fa69ed7164ebfb44a4de3dc387a46d3514c8a Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 3 Nov 2020 09:57:04 -0500
Subject: [PATCH 018/179] adding stack logging

---
 .../edu/harvard/iq/dataverse/EjbDataverseEngine.java | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 36d0f550955..506e06f9452 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -22,6 +22,7 @@
 import edu.harvard.iq.dataverse.search.IndexServiceBean;
 import edu.harvard.iq.dataverse.search.SearchServiceBean;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.Set;
 import javax.ejb.EJB;
 import javax.ejb.Stateless;
@@ -46,6 +47,8 @@
 import javax.validation.ConstraintViolation;
 import javax.validation.ConstraintViolationException;
 
+import org.apache.log4j.lf5.LogLevel;
+
 /**
  * An EJB capable of executing {@link Command}s in a JEE
environment. * @@ -340,6 +343,15 @@ public CommandContext getContext() { @Override public void addCommand (Command command){ + + if(logger.isLoggable(Level.FINE)) { + try { + logger.fine("Current Command Stack: "); + commandsCalled.forEach((c)->{logger.fine("Command : " + c.getClass().getSimpleName() + "for DvObjects"); for(Map.Entry e : ((Map)c.getAffectedDvObjects()).entrySet()) {logger.fine(e.getKey() + " : " + e.getValue().getId());}}); + logger.fine("Adding command: " + command.getClass().getSimpleName() + " for DvObjects"); + for(Map.Entry e : ((Map)command.getAffectedDvObjects()).entrySet()) {logger.fine(e.getKey() + " : " + e.getValue().getId());} + } catch(Exception e) {logger.fine("Exception logging command stack: " + e.getMessage());}; + } commandsCalled.push(command); } From 99d8127f494576da3a2de855b04f2fd95af3bd80 Mon Sep 17 00:00:00 2001 From: qqmyers Date: Tue, 3 Nov 2020 11:07:27 -0500 Subject: [PATCH 019/179] adding an instance number to each line to be able to figure out parallel calls --- .../iq/dataverse/EjbDataverseEngine.java | 35 +++++++++++++------ 1 file changed, 25 insertions(+), 10 deletions(-) diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java index 506e06f9452..0fb5fd2a953 100644 --- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java +++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java @@ -343,16 +343,31 @@ public CommandContext getContext() { @Override public void addCommand (Command command){ - - if(logger.isLoggable(Level.FINE)) { - try { - logger.fine("Current Command Stack: "); - commandsCalled.forEach((c)->{logger.fine("Command : " + c.getClass().getSimpleName() + "for DvObjects"); for(Map.Entry e : ((Map)c.getAffectedDvObjects()).entrySet()) {logger.fine(e.getKey() + " : " + e.getValue().getId());}}); - logger.fine("Adding command: " + command.getClass().getSimpleName() + " for DvObjects"); - for(Map.Entry e : ((Map)command.getAffectedDvObjects()).entrySet()) {logger.fine(e.getKey() + " : " + e.getValue().getId());} - } catch(Exception e) {logger.fine("Exception logging command stack: " + e.getMessage());}; - } - commandsCalled.push(command); + + if (logger.isLoggable(Level.FINE)) { + int instance = (int) (100 * Math.random()); + try { + logger.fine("Current Command Stack (" + instance + "): "); + commandsCalled.forEach((c) -> { + logger.fine("Command (" + instance + "): " + c.getClass().getSimpleName() + + "for DvObjects"); + for (Map.Entry e : ((Map) c.getAffectedDvObjects()) + .entrySet()) { + logger.fine("(" + instance + "): " + e.getKey() + " : " + e.getValue().getId()); + } + }); + logger.fine("Adding command(" + instance + "): " + command.getClass().getSimpleName() + + " for DvObjects"); + for (Map.Entry e : ((Map) command + .getAffectedDvObjects()).entrySet()) { + logger.fine(e.getKey() + " : " + e.getValue().getId()); + } + } catch (Exception e) { + logger.fine("Exception logging command stack(" + instance + "): " + e.getMessage()); + } + ; + } + commandsCalled.push(command); } From 63096ecbb7de85eff00d0015ea46607801645845 Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Tue, 3 Nov 2020 16:21:16 -0500 Subject: [PATCH 020/179] file tags searchable from advanced search and dataset page #7194 --- doc/release-notes/7194-file-tags-search.md | 5 +++++ .../java/edu/harvard/iq/dataverse/DatasetPage.java | 1 + .../iq/dataverse/search/AdvancedSearchPage.java | 13 +++++++++++++ src/main/java/propertyFiles/Bundle.properties | 2 ++ 
 src/main/webapp/search/advanced.xhtml              | 10 ++++++++++
 5 files changed, 31 insertions(+)
 create mode 100644 doc/release-notes/7194-file-tags-search.md

diff --git a/doc/release-notes/7194-file-tags-search.md b/doc/release-notes/7194-file-tags-search.md
new file mode 100644
index 00000000000..d56b29d7b98
--- /dev/null
+++ b/doc/release-notes/7194-file-tags-search.md
@@ -0,0 +1,5 @@
+### File Tags Searchable from Advanced Search and Dataset Search.
+
+File tags ("Documentation", "Data", "Code", etc.) now appear on the Advanced Search page.
+
+Performing a search for files on the dataset page now includes file tags. (Previously, only file name and file description were searched.)
diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 803b3fa5243..f495569dc9a 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -809,6 +809,7 @@ public Set<Long> getFileIdsInVersionFromSolr(Long datasetVersionId, String patte
             // searching on the file name ("label") and description:
             queryStrings.add(SearchUtil.constructQuery(SearchFields.FILE_NAME, pattern + "*"));
             queryStrings.add(SearchUtil.constructQuery(SearchFields.FILE_DESCRIPTION, pattern + "*"));
+            queryStrings.add(SearchUtil.constructQuery(SearchFields.FILE_TAG_SEARCHABLE, pattern + "*"));
 
             solrQuery.setQuery(SearchUtil.constructQuery(queryStrings, false));
         } else {
diff --git a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
index 821236f4d8f..f3af7bf90c7 100644
--- a/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/search/AdvancedSearchPage.java
@@ -57,6 +57,7 @@ public class AdvancedSearchPage implements java.io.Serializable {
     private String fileFieldFiletype;
     private String fileFieldVariableName;
     private String fileFieldVariableLabel;
+    private String fileFieldFileTags;
 
     public void init() {
@@ -174,6 +175,10 @@ private String constructFileQuery() {
             queryStrings.add(constructQuery(SearchFields.VARIABLE_LABEL, fileFieldVariableLabel));
         }
 
+        if (StringUtils.isNotBlank(fileFieldFileTags)) {
+            queryStrings.add(constructQuery(SearchFields.FILE_TAG_SEARCHABLE, fileFieldFileTags));
+        }
+
         return constructQuery(queryStrings, true);
     }
 
@@ -319,4 +324,12 @@ public void setFileFieldVariableLabel(String fileFieldVariableLabel) {
         this.fileFieldVariableLabel = fileFieldVariableLabel;
     }
 
+    public String getFileFieldFileTags() {
+        return fileFieldFileTags;
+    }
+
+    public void setFileFieldFileTags(String fileFieldFileTags) {
+        this.fileFieldFileTags = fileFieldFileTags;
+    }
+
 }
diff --git a/src/main/java/propertyFiles/Bundle.properties b/src/main/java/propertyFiles/Bundle.properties
index 2defaa42b69..8319898ea16 100755
--- a/src/main/java/propertyFiles/Bundle.properties
+++ b/src/main/java/propertyFiles/Bundle.properties
@@ -819,6 +819,8 @@ advanced.search.files.variableLabel.tip=A short description of the variable.
 advanced.search.datasets.persistentId.tip=The persistent identifier for the dataset.
 advanced.search.datasets.persistentId=Dataset Persistent ID
 advanced.search.datasets.persistentId.tip=The unique persistent identifier for a dataset, which can be a Handle or DOI in Dataverse.
+advanced.search.files.fileTags=File Tags
+advanced.search.files.fileTags.tip=Terms such as "Documentation", "Data", or "Code" that have been applied to files.
 
 # search
 search.datasets.literalquestion=Text of the actual, literal question asked.
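With the Bundle strings in place, a dataset-page file search now ORs three clauses instead of two. As a raw Solr request it looks roughly like this -- the field names are read off the SearchFields constants and are assumptions, not verified against the Solr schema:

    # sketch of the file search for the pattern "data" against a local Solr
    curl "http://localhost:8983/solr/collection1/select" \
      --data-urlencode 'q=fileName:data* OR fileDescription:data* OR fileTags:data*'

Note the two boolean flags in the Java above: the dataset page joins its per-field clauses with constructQuery(queryStrings, false), so a match on any one field suffices, while the advanced-search page's final constructQuery(queryStrings, true) requires every filled-in field to match.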
diff --git a/src/main/webapp/search/advanced.xhtml b/src/main/webapp/search/advanced.xhtml
index 2b67877fe4a..d4c621226b6 100644
--- a/src/main/webapp/search/advanced.xhtml
+++ b/src/main/webapp/search/advanced.xhtml
@@ -206,6 +206,16 @@
+    [the ten added lines of JSF markup for the new File Tags label, tooltip, and input field were stripped during extraction; only their "+" markers survived]

From e0fc1f8294a495b4053dfabf1de6a25c402921c8 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Wed, 4 Nov 2020 16:49:22 -0500
Subject: [PATCH 021/179] get dialog to close on Save Changes for groups #6792

---
 .../files/var/www/dataverse/branding/analytics-code.html | 2 --
 1 file changed, 2 deletions(-)

diff --git a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
index f300a5054e8..95357da85a3 100644
--- a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
+++ b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
@@ -1,7 +1,6 @@
-    [the two removed lines of analytics HTML were stripped during extraction]

From 119c0bc0f89f42cd2b4c60d30923196cb508939e Mon Sep 17 00:00:00 2001
From: "don.sizemore"
Date: Thu, 5 Nov 2020 11:08:04 -0500
Subject: [PATCH 022/179] #7394 recommend RHEL/CentOS 8

---
 .../source/installation/prerequisites.rst | 26 ++++++++++++-------
 1 file changed, 17 insertions(+), 9 deletions(-)

diff --git a/doc/sphinx-guides/source/installation/prerequisites.rst b/doc/sphinx-guides/source/installation/prerequisites.rst
index 7d1d06f4903..3e9e2fa879e 100644
--- a/doc/sphinx-guides/source/installation/prerequisites.rst
+++ b/doc/sphinx-guides/source/installation/prerequisites.rst
@@ -14,7 +14,7 @@ After following all the steps below, you can proceed to the :doc:`installation-m
 Linux
 -----
 
-We assume you plan to run Dataverse on Linux and we recommend RHEL/CentOS, which is the Linux distribution tested by the Dataverse development team. Please be aware that while el7 (RHEL/CentOS 7) is the recommended platform, the steps below were orginally written for el6 and may need to be updated (please feel free to make a pull request!).
+We assume you plan to run Dataverse on Linux and we recommend RHEL/CentOS, which is the Linux distribution tested by the Dataverse development team. Please be aware that while el8 (RHEL/CentOS 8) is the recommended platform, the steps below were originally written for el6 and may need to be updated (please feel free to make a pull request!).
 
 Java
 ----
@@ -322,27 +322,35 @@ components and libraries. Please consult the instructions in the
 Installing R
 ============
 
-Can be installed with :fixedwidthplain:`yum`::
+For RHEL/CentOS, the EPEL distribution is strongly recommended.
 
-    yum install R-core R-core-devel
+If :fixedwidthplain:`yum` isn't configured to use EPEL repositories ( https://fedoraproject.org/wiki/EPEL ):
 
-EPEL distribution is strongly recommended. The version of R currently available from epel6 and epel7 is 3.5; it has been tested and is known to work on RedHat and CentOS versions 6 and 7.
+RHEL/CentOS 8 users can install the epel-release RPM::
 
-If :fixedwidthplain:`yum` isn't configured to use EPEL repositories ( https://fedoraproject.org/wiki/EPEL ):
+    yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
 
-RHEL/CentOS users can install the RPM :fixedwidthplain:`epel-release`. For RHEL/CentOS 7::
+RHEL/CentOS 7 users can install the epel-release RPM::
 
     yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-7.noarch.rpm
 
-RHEL/CentOS users can install the RPM :fixedwidthplain:`epel-release`. For RHEL/CentOS 6::
-
-    yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-6.noarch.rpm
-
-RHEL users will want to log in to their organization's respective RHN interface, find the particular machine in question and:
+RHEL 8 users will need to enable the CodeReady-Builder repository::
+
+    subscription-manager repos --enable codeready-builder-for-rhel-8-x86_64-rpms
+
+CentOS 8 users will need to enable the PowerTools repository::
+
+    dnf config-manager --enable PowerTools
+
+RHEL 7 users will want to log in to their organization's respective RHN interface, find the particular machine in question and:
 
 • click on "Subscribed Channels: Alter Channel Subscriptions"
 • enable EPEL, Server Extras, Server Optional
 
+Finally, install R with :fixedwidthplain:`yum`::
+
+    yum install R-core R-core-devel
+
 Installing the required R libraries
 ===================================

From 5c2391beb65528675609d499882e41eb56709518 Mon Sep 17 00:00:00 2001
From: "don.sizemore"
Date: Thu, 5 Nov 2020 15:19:27 -0500
Subject: [PATCH 023/179] #7384 point docker-aio, docker-dcm, openshift, and
 vagrant at CentOS 8

---
 conf/docker-aio/c8.dockerfile                | 82 +++++++++
 conf/docker-aio/readme.md                    |  2 +-
 conf/docker-dcm/c8client.dockerfile          |  7 +
 conf/docker-dcm/dcmsrv.dockerfile            |  2 +-
 conf/docker-dcm/docker-compose.yml           |  2 +-
 conf/docker-dcm/rsalsrv.dockerfile           |  2 +-
 conf/openshift/openshift.json                | 12 +-
 conf/vagrant/etc/yum.repos.d/shibboleth.repo |  6 +-
 .../source/installation/shibboleth.rst       |  6 +-
 scripts/r/rserve/rserve-setup.sh             |  2 +-
 tests/jenkins/groupvars.yml                  | 163 ------------------
 11 files changed, 108 insertions(+), 178 deletions(-)
 create mode 100644 conf/docker-aio/c8.dockerfile
 create mode 100644 conf/docker-dcm/c8client.dockerfile
 delete mode 100644 tests/jenkins/groupvars.yml

diff --git a/conf/docker-aio/c8.dockerfile b/conf/docker-aio/c8.dockerfile
new file mode 100644
index 00000000000..e5994dbcf95
--- /dev/null
+++ b/conf/docker-aio/c8.dockerfile
@@ -0,0 +1,82 @@
+FROM centos:8
+# OS dependencies
+# PG 10 is the default in centos8; keep the repo comment for when we bump to 11+
+#RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-8-x86_64/pgdg-redhat-repo-latest.noarch.rpm
+RUN yum install -y java-1.8.0-openjdk-devel postgresql-server sudo epel-release unzip perl curl httpd
+RUN yum install -y jq lsof awscli
+
+# copy and unpack dependencies (solr, payara)
+COPY dv /tmp/dv
+COPY testdata/schema*.xml /tmp/dv/
+COPY testdata/solrconfig.xml /tmp/dv
+
+# ITs need files
+COPY testdata/sushi_sample_logs.json /tmp/
+
+# IPv6 and localhost appear to be related to some of the intermittent connection issues
+COPY disableipv6.conf /etc/sysctl.d/
+RUN rm /etc/httpd/conf/*
+COPY httpd.conf /etc/httpd/conf
+RUN cd /opt ; tar zxf /tmp/dv/deps/solr-7.7.2dv.tgz
+RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2020.2.zip ; ln -s /opt/payara5 /opt/glassfish4
+
+# this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitor endpoint for the jvm)
+COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml
+
+# PG 10 from the stock centos8 postgresql-server package puts initdb on the default PATH
+RUN sudo -u postgres /usr/bin/initdb -D /var/lib/pgsql/data
+
+# copy configuration related files
+RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/data/
+RUN cp -r /opt/solr-7.7.2/server/solr/configsets/_default /opt/solr-7.7.2/server/solr/collection1
+RUN cp
/tmp/dv/schema*.xml /opt/solr-7.7.2/server/solr/collection1/conf/ +RUN cp /tmp/dv/solrconfig.xml /opt/solr-7.7.2/server/solr/collection1/conf/solrconfig.xml + +# skipping payara user and solr user (run both as root) + +#solr port +EXPOSE 8983 + +# postgres port +EXPOSE 5432 + +# payara port +EXPOSE 8080 + +# apache port, http +EXPOSE 80 + +# debugger ports (jmx,jdb) +EXPOSE 8686 +EXPOSE 9009 + +RUN mkdir /opt/dv + +# keeping the symlink on the off chance that something else is still assuming /usr/local/glassfish4 +RUN ln -s /opt/payara5 /usr/local/glassfish4 +COPY dv/install/ /opt/dv/ +COPY install.bash /opt/dv/ +COPY entrypoint.bash /opt/dv/ +COPY testdata /opt/dv/testdata +COPY testdata/updateSchemaMDB.sh /opt/dv/testdata/ +COPY testscripts/* /opt/dv/testdata/ +COPY setupIT.bash /opt/dv +WORKDIR /opt/dv + +# need to take DOI provider info from build args as of ec377d2a4e27424db8815c55ce544deee48fc5e0 +# Default to EZID; use built-args to switch to DataCite (or potentially handles) +#ARG DoiProvider=EZID +ARG DoiProvider=FAKE +ARG doi_baseurl=https://ezid.cdlib.org +ARG doi_username=apitest +ARG doi_password=apitest +ENV DoiProvider=${DoiProvider} +ENV doi_baseurl=${doi_baseurl} +ENV doi_username=${doi_username} +ENV doi_password=${doi_password} +COPY configure_doi.bash /opt/dv + +# healthcheck for payara only (assumes modified domain.xml); +# does not check dataverse application status. +HEALTHCHECK CMD curl --fail http://localhost:4848/monitoring/domain/server.json || exit 1 +CMD ["/opt/dv/entrypoint.bash"] diff --git a/conf/docker-aio/readme.md b/conf/docker-aio/readme.md index 32b1bacc65f..fb0fc00d2f7 100644 --- a/conf/docker-aio/readme.md +++ b/conf/docker-aio/readme.md @@ -27,7 +27,7 @@ Also usable for normal development and system evaluation; not intended for produ #### Setup - `cd conf/docker-aio`, and run `./1prep.sh` to copy files for integration test data into docker build context; `1prep.sh` will also build the war file and installation zip file -- build the docker image: `docker build -t dv0 -f c7.dockerfile .` +- build the docker image: `docker build -t dv0 -f c8.dockerfile .` - Run image: `docker run -d -p 8083:8080 -p 8084:80 --name dv dv0` (aka - forward port 8083 locally to 8080 in the container for payara, and 8084 to 80 for apache); if you'd like to connect a java debugger to payara, use `docker run -d -p 8083:8080 -p 8084:80 -p 9010:9009 --name dv dv0` diff --git a/conf/docker-dcm/c8client.dockerfile b/conf/docker-dcm/c8client.dockerfile new file mode 100644 index 00000000000..f2edd7f373c --- /dev/null +++ b/conf/docker-dcm/c8client.dockerfile @@ -0,0 +1,7 @@ +# build from repo root +FROM centos:8 +RUN yum install -y epel-release +RUN yum install -y rsync openssh-clients jq curl wget lynx +RUN useradd depositor +USER depositor +WORKDIR /home/depositor diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile index 9989fa3a89d..01b4e5b742e 100644 --- a/conf/docker-dcm/dcmsrv.dockerfile +++ b/conf/docker-dcm/dcmsrv.dockerfile @@ -1,5 +1,5 @@ # build from repo root -FROM centos:6 +FROM centos:8 RUN yum install -y epel-release ARG RPMFILE=dcm-0.5-0.noarch.rpm COPY ${RPMFILE} /tmp/ diff --git a/conf/docker-dcm/docker-compose.yml b/conf/docker-dcm/docker-compose.yml index 49d4467d349..0f449880d32 100644 --- a/conf/docker-dcm/docker-compose.yml +++ b/conf/docker-dcm/docker-compose.yml @@ -39,7 +39,7 @@ services: client: build: context: . 
- dockerfile: c6client.dockerfile + dockerfile: c8client.dockerfile command: sleep infinity container_name: dcm_client volumes: diff --git a/conf/docker-dcm/rsalsrv.dockerfile b/conf/docker-dcm/rsalsrv.dockerfile index 844432afe6b..836d6837db9 100644 --- a/conf/docker-dcm/rsalsrv.dockerfile +++ b/conf/docker-dcm/rsalsrv.dockerfile @@ -1,4 +1,4 @@ -FROM centos:7 +FROM centos:8 ARG RPMFILE=rsal-0.1-0.noarch.rpm RUN yum update; yum install -y epel-release COPY ${RPMFILE} /tmp/ diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json index 583079a5260..bd569c7cfde 100644 --- a/conf/openshift/openshift.json +++ b/conf/openshift/openshift.json @@ -150,10 +150,10 @@ "kind": "ImageStream", "apiVersion": "v1", "metadata": { - "name": "centos-postgresql-94-centos7" + "name": "centos-postgresql-10-centos8" }, "spec": { - "dockerImageRepository": "centos/postgresql-94-centos7" + "dockerImageRepository": "centos/postgresql-10-centos8" } }, { @@ -397,8 +397,8 @@ "spec": { "containers": [ { - "name": "centos-postgresql-94-centos7", - "image": "centos/postgresql-94-centos7", + "name": "centos-postgresql-10-centos8", + "image": "centos/postgresql-10-centos8", "command": [ "sh", "-c", @@ -506,11 +506,11 @@ "imageChangeParams": { "automatic": true, "containerNames": [ - "centos-postgresql-94-centos7" + "centos-postgresql-10-centos8" ], "from": { "kind": "ImageStreamTag", - "name": "centos/postgresql-94-centos7:latest" + "name": "centos/postgresql-10-centos8:latest" } } }, diff --git a/conf/vagrant/etc/yum.repos.d/shibboleth.repo b/conf/vagrant/etc/yum.repos.d/shibboleth.repo index ebbe3747a10..3a9277b127f 100644 --- a/conf/vagrant/etc/yum.repos.d/shibboleth.repo +++ b/conf/vagrant/etc/yum.repos.d/shibboleth.repo @@ -1,7 +1,7 @@ [security_shibboleth] -name=Shibboleth (CentOS_CentOS-6) +name=Shibboleth (CentOS_8) type=rpm-md -baseurl=http://download.opensuse.org/repositories/security:/shibboleth/CentOS_CentOS-6/ +baseurl=http://download.opensuse.org/repositories/security:/shibboleth/CentOS_8/ gpgcheck=1 -gpgkey=http://download.opensuse.org/repositories/security:/shibboleth/CentOS_CentOS-6/repodata/repomd.xml.key +gpgkey=http://download.opensuse.org/repositories/security:/shibboleth/CentOS_8/repodata/repomd.xml.key enabled=1 diff --git a/doc/sphinx-guides/source/installation/shibboleth.rst b/doc/sphinx-guides/source/installation/shibboleth.rst index 3d24c823e77..7d24b5c5c32 100644 --- a/doc/sphinx-guides/source/installation/shibboleth.rst +++ b/doc/sphinx-guides/source/installation/shibboleth.rst @@ -50,6 +50,10 @@ Install ``wget`` if you don't have it already: ``yum install wget`` +If you are running el8 (RHEL/CentOS 8): + +``wget http://download.opensuse.org/repositories/security:/shibboleth/CentOS_8/security:shibboleth.repo`` + If you are running el7 (RHEL/CentOS 7): ``wget http://download.opensuse.org/repositories/security:/shibboleth/CentOS_7/security:shibboleth.repo`` @@ -273,7 +277,7 @@ On CentOS 6: Configure Apache and shibd to Start at Boot ------------------------------------------- -On CentOS 7: +On CentOS 7/8: ``systemctl enable httpd.service`` diff --git a/scripts/r/rserve/rserve-setup.sh b/scripts/r/rserve/rserve-setup.sh index 2ed0dae2700..48ee747499a 100755 --- a/scripts/r/rserve/rserve-setup.sh +++ b/scripts/r/rserve/rserve-setup.sh @@ -47,7 +47,7 @@ then echo "You can start Rserve daemon by executing" echo " service rserve start" echo - echo "If this is a RedHat/CentOS 7 system, you may want to use the systemctl file rserve.service instead (provided in this directory)" + echo 
"If this is a RedHat/CentOS 7/8 system, you may want to use the systemctl file rserve.service instead (provided in this directory)" else echo "Rserve startup file already in place." fi diff --git a/tests/jenkins/groupvars.yml b/tests/jenkins/groupvars.yml deleted file mode 100644 index 3d1520725c1..00000000000 --- a/tests/jenkins/groupvars.yml +++ /dev/null @@ -1,163 +0,0 @@ ---- -# dataverse/defaults/main.yml - -apache: - ssl: - enabled: false - cert: - interm: - key: - pem: - cert: - key: - interm: - -letsencrypt: - enabled: false - certbot: - autorenew: true - email: - user: certbot - -dataverse: - adminpass: admin1 - allow_signups: true - api: - allow_lookup: false - blocked_endpoints: "admin,test" - blocked_policy: "localhost-only" - location: "http://localhost:8080/api" - test_suite: true - copyright: "Your Institution" - counter: - enabled: false - geoipdir: maxmind_geoip - geoipfile: GeoLite2-Country.mmdb - hub_api_token: set_me_in_secrets - hub_base_url: "https://api.datacite.org" - machines_url: "https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/machine.txt" - maxmind_geoip_country_path: "maxmind_geoip/GeoLite2-Country.mmdb" - output_file: "/dataverse/sushi_sample_logs" - output_format: json - platform: dash - robots_url: "https://raw.githubusercontent.com/CDLUC3/Make-Data-Count/master/user-agents/lists/robot.txt" - version: "0.1" - upload_to_hub: False - user: counter - year_month: "2018-05" - db: - name: dvndb - host: localhost - user: dvnuser - pass: dvnsecret - port: 5432 - demo: false - doi: - authority: "10.5072" - baseurl: https://mds.test.datacite.org/ - username: "testaccount" - password: "notmypassword" - protocol: doi - provider: FAKE - shoulder: "FK2/" - externaltools: - datacurationtool: - enabled: true - method: demo - dataexplorer: - enabled: true - wholetale: - enabled: false - filesdir: /usr/local/dvn/data - glassfish: - user: glassfish - group: glassfish - domain: domain1 - logformat: ulf - adminuser: admin - adminpass: notPr0d - siteurl: - timeout: 180 - root: /usr/local - dir: glassfish4 # change this for payara, etc. 
- zipurl: http://dlc-cdn.sun.com/glassfish/4.1/release/glassfish-4.1.zip - #zipurl: https://search.maven.org/remotecontent?filepath=fish/payara/distributions/payara/5.192/payara-5.192.zip - google_analytics_key: - java: - version: 1.8.0 - memheap: 2048 - postgres: - reporpm: https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm - version: 9.6 - jdbcurl: https://jdbc.postgresql.org/download/postgresql-42.2.12.jar - log_lock_waits: true - previewers: - enabled: true - sampledata: - enabled: false - dir: /tmp/sampledata - repo: https://github.com/IQSS/dataverse-sample-data.git - branch: master - venv: /tmp/sampledata_venv - custom_sampledata: - enabled: false - custom_sampledir: "{{ playbook_dir }}/custom_sampledata" - custom_sampledatasets: "{{ playbook_dir }}/custom_sampledata/datasets" - custom_sampledataverses: "{{ playbook_dir }}/custom_sampledata/dataverses" - custom_sampleusers: "{{ playbook_dir }}/custom_sampledata/users" - custom_samplefiles: "{{ playbook_dir }}/custom_sampledata/files" - service_email: noreply@dataverse.yourinstitution.edu - smtp: localhost # or the FQDN of your organization's SMTP relay - solr: - group: solr - root: /usr/local/solr - user: solr - version: 7.7.2 - srcdir: /tmp/dataverse - thumbnails: true - unittests: - enabled: true - argument: '-DcompilerArgument=-Xlint:unchecked test -P all-unit-tests' - usermgmtkey: burrito - version: 4.16 - -grafana: - grafana_user: 'admin' - grafana_password: 'admin' - -munin: - install: false - admin: - user: admin - passwd: p4sswurd - -prometheus: - install: false - node_exporter: https://github.com/prometheus/node_exporter/releases/download/v0.18.1/node_exporter-0.18.1.linux-amd64.tar.gz - root: /usr/local/prometheus - url: https://github.com/prometheus/prometheus/releases/download/v2.11.1/prometheus-2.11.1.linux-amd64.tar.gz - user: prometheus - -rserve: - install: false - host: localhost - user: rserve - group: rserve - pass: rserve - port: 6311 - workdir: /tmp/Rserv - -shibboleth: - enabled: false - repo: "http://download.opensuse.org/repositories/security:/shibboleth/CentOS_7/security:shibboleth.repo" - -sshkeys: - enabled: false - files: - urls: - user: - -# un-nesting the below so's we can pass them at the CLI - -dataverse_branch: release -dataverse_repo: https://github.com/IQSS/dataverse.git From 92e960934f739328c02e8845358ca79f18a99bbc Mon Sep 17 00:00:00 2001 From: Oliver Bertuch Date: Fri, 6 Nov 2020 16:40:39 +0100 Subject: [PATCH 024/179] Move index creation from baseline script to a 5.1.1 migration, so it will be applied in order (cannot create an older, out-of-order version as this would break migrations for everyone). #7256 --- .../db/migration/V1__flyway_schema_baseline.sql | 9 --------- .../V5.1.1.3__7256-purge-referencedata.sql | 17 +++++++++++++++++ 2 files changed, 17 insertions(+), 9 deletions(-) create mode 100644 src/main/resources/db/migration/V5.1.1.3__7256-purge-referencedata.sql diff --git a/src/main/resources/db/migration/V1__flyway_schema_baseline.sql b/src/main/resources/db/migration/V1__flyway_schema_baseline.sql index 2ec219cd19e..d1caa63cd71 100644 --- a/src/main/resources/db/migration/V1__flyway_schema_baseline.sql +++ b/src/main/resources/db/migration/V1__flyway_schema_baseline.sql @@ -1,11 +1,2 @@ -- TODO: we still should add the real base line here, too. That would avoid conflicts between EclipseLink -- trying to create new tables on existing databases. 
See https://github.com/IQSS/dataverse/issues/5871
-
--- This is unsupported by JPA, as it is PostgreSQL specific. Has to be done here, cannot be done in code.
--- (Only other option would be a lowercase copy of the data as a separate column, automatically filled by JPA)
-CREATE UNIQUE INDEX IF NOT EXISTS dataverse_alias_unique_idx on dataverse (LOWER(alias));
-CREATE UNIQUE INDEX IF NOT EXISTS index_authenticateduser_lower_email ON authenticateduser (lower(email));
-
--- Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132
--- This unique index will prevent the multiple draft issue, yet it cannot be done in JPA code.
-CREATE UNIQUE INDEX IF NOT EXISTS one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT';
\ No newline at end of file
diff --git a/src/main/resources/db/migration/V5.1.1.3__7256-purge-referencedata.sql b/src/main/resources/db/migration/V5.1.1.3__7256-purge-referencedata.sql
new file mode 100644
index 00000000000..176629f9abb
--- /dev/null
+++ b/src/main/resources/db/migration/V5.1.1.3__7256-purge-referencedata.sql
@@ -0,0 +1,17 @@
+-- #5361 and #7256 are about faster deployments, especially during development, sitting on an empty database.
+--
+-- This script has been part of scripts/database/reference_data.sql that had to be executed manually on every new
+-- deployment (manually in the sense of Flyway didn't, the outside installer or an admin took care of it).
+--
+-- While this is pretty old stuff and should have been done earlier (baseline...), it will be a nice migration
+-- and behave like nothing happened if this is an existing installation. All new installations have an empty database
+-- on first app boot and benefit from this Flyway-based management.
+
+-- This is unsupported by JPA, as it is PostgreSQL specific. Has to be done here, cannot be done in code.
+-- (Only other option would be a lowercase copy of the data as a separate column, automatically filled by JPA)
+CREATE UNIQUE INDEX IF NOT EXISTS dataverse_alias_unique_idx on dataverse (LOWER(alias));
+CREATE UNIQUE INDEX IF NOT EXISTS index_authenticateduser_lower_email ON authenticateduser (lower(email));
+
+-- Edit Dataset: Investigate and correct multiple draft issue: https://github.com/IQSS/dataverse/issues/2132
+-- This unique index will prevent the multiple draft issue, yet it cannot be done in JPA code.
+CREATE UNIQUE INDEX IF NOT EXISTS one_draft_version_per_dataset ON datasetversion (dataset_id) WHERE versionstate='DRAFT';
\ No newline at end of file

From 5132e04e6e61f5bb26b153b9e8b743429445286f Mon Sep 17 00:00:00 2001
From: Oliver Bertuch
Date: Fri, 6 Nov 2020 16:44:45 +0100
Subject: [PATCH 025/179] Add a comment to the first bootstrap SQL script. #7256

---
 .../migration/afterMigrate__1-upsert-referenceData.sql | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql b/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql
index 60140b44485..d45c03bc03b 100644
--- a/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql
+++ b/src/main/resources/db/migration/afterMigrate__1-upsert-referenceData.sql
@@ -1,3 +1,10 @@
+-- #5361 and #7256 are about faster deployments, especially during development, sitting on an empty database.
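(A quick way to see the "load only once" behavior these comments describe, on a hypothetical scratch table that is not part of the script:

    # the guard is true only while the table is empty, so re-running the
    # same INSERT on every migration pass is a harmless no-op
    psql -d dvndb <<'EOF'
    CREATE TABLE demo_guestbook (id serial PRIMARY KEY, name text);
    INSERT INTO demo_guestbook (name) SELECT 'Default'
      WHERE NOT EXISTS (SELECT id FROM demo_guestbook);
    INSERT INTO demo_guestbook (name) SELECT 'Default'
      WHERE NOT EXISTS (SELECT id FROM demo_guestbook);
    SELECT count(*) FROM demo_guestbook;   -- still 1
    EOF

The id-keyed reference rows get the same idempotency from ON CONFLICT DO NOTHING.)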
+-- +-- This script has been part of scripts/database/reference_data.sql that had to be executed manually on every new +-- deployment (manually in the sense of Flyway didn't, the outside installer or an admin took care of it). +-- +-- This script will load some initial, common data if not present (so only once, when booting for the first time). + -- using http://dublincore.org/schemas/xmls/qdc/dcterms.xsd because at http://dublincore.org/schemas/xmls/ it's the -- schema location for http://purl.org/dc/terms/ which is referenced in http://swordapp.github.io/SWORDv2-Profile/SWORDProfile.html INSERT INTO foreignmetadataformatmapping (id, name, startelement, displayName, schemalocation) @@ -28,10 +35,12 @@ INSERT INTO foreignmetadatafieldmapping (id, foreignfieldxpath, datasetfieldname (19, ':language', 'language', FALSE, NULL, 1 ) ON CONFLICT DO NOTHING; +-- Simple trick: WHERE NOT EXISTS (SELECT id FROM table) is only true if the table is empty. INSERT INTO guestbook (emailrequired, enabled, institutionrequired, createtime, name, namerequired, positionrequired, dataverse_id) SELECT false, true, false, now(), 'Default', false, false, null WHERE NOT EXISTS (SELECT id FROM guestbook); +-- Simple trick: WHERE NOT EXISTS (SELECT id FROM table) is only true if the table is empty. INSERT INTO worldmapauth_tokentype (name, created, contactemail, hostname, ipaddress, mapitlink, From 28d38c3a11ef41f738369100c9dc34afc69e77b9 Mon Sep 17 00:00:00 2001 From: "don.sizemore" Date: Mon, 9 Nov 2020 13:44:32 -0500 Subject: [PATCH 026/179] #7394 remove conf/openshift/openshift.json per @pdurbin --- conf/openshift/openshift.json | 646 ---------------------------------- 1 file changed, 646 deletions(-) delete mode 100644 conf/openshift/openshift.json diff --git a/conf/openshift/openshift.json b/conf/openshift/openshift.json deleted file mode 100644 index bd569c7cfde..00000000000 --- a/conf/openshift/openshift.json +++ /dev/null @@ -1,646 +0,0 @@ -{ - "kind": "Template", - "apiVersion": "v1", - "metadata": { - "name": "dataverse", - "labels": { - "name": "dataverse" - }, - "annotations": { - "openshift.io/description": "Dataverse is open source research data repository software: https://dataverse.org", - "openshift.io/display-name": "Dataverse" - } - }, - "objects": [ - { - "kind": "Secret", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-admin-secret" - }, - "stringData" : { - "admin-password" : "${ADMIN_PASSWORD}" - } - }, - { - "kind": "Secret", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-postgresql-secret" - }, - "stringData" : { - "postgresql-user" : "${POSTGRESQL_USER}", - "postgresql-password" : "${POSTGRESQL_PASSWORD}" - } - }, - { - "kind": "Secret", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-postgresql-master-secret" - }, - "stringData" : { - "postgresql-master-user" : "${POSTGRESQL_MASTER_USER}", - "postgresql-master-password" : "${POSTGRESQL_MASTER_PASSWORD}" - } - }, - { - "kind": "Secret", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-postgresql-admin-secret" - }, - "stringData" : { - "postgresql-admin-password" : "${POSTGRESQL_ADMIN_PASSWORD}" - } - }, - { - "kind": "Service", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-glassfish-service" - }, - "spec": { - "selector": { - "name": "iqss-dataverse-glassfish" - }, - "ports": [ - { - "name": "web", - "protocol": "TCP", - "port": 8080, - "targetPort": 8080 - } - ] - } - }, - { - "kind": "Service", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-postgresql-service" - }, - 
"spec": { - "selector": { - "name": "iqss-dataverse-postgresql" - }, - "clusterIP": "None", - "ports": [ - { - "name": "database", - "protocol": "TCP", - "port": 5432, - "targetPort": 5432 - } - ] - } - }, - { - "kind": "Service", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-solr-service" - }, - "spec": { - "selector": { - "name": "iqss-dataverse-solr" - }, - "clusterIP": "None", - "ports": [ - { - "name": "search", - "protocol": "TCP", - "port": 8983, - "targetPort": 8983 - } - ] - } - }, - { - "apiVersion": "v1", - "kind": "Route", - "metadata": { - "annotations": { - "openshift.io/host.generated": "true" - }, - "name": "dataverse" - }, - "spec": { - "port": { - "targetPort": "web" - }, - "to": { - "kind": "Service", - "name": "dataverse-glassfish-service", - "weight": 100 - } - } - }, - { - "kind": "ImageStream", - "apiVersion": "v1", - "metadata": { - "name": "dataverse-plus-glassfish" - }, - "spec": { - "dockerImageRepository": "iqss/dataverse-glassfish" - } - }, - { - "kind": "ImageStream", - "apiVersion": "v1", - "metadata": { - "name": "centos-postgresql-10-centos8" - }, - "spec": { - "dockerImageRepository": "centos/postgresql-10-centos8" - } - }, - { - "kind": "ImageStream", - "apiVersion": "v1", - "metadata": { - "name": "iqss-dataverse-solr" - }, - "spec": { - "dockerImageRepository": "iqss/dataverse-solr" - } - }, - { - "kind": "StatefulSet", - "apiVersion": "apps/v1beta1", - "metadata": { - "name": "dataverse-glassfish", - "annotations": { - "template.alpha.openshift.io/wait-for-ready": "true", - "alpha.image.policy.openshift.io/resolve-names": "*" - } - }, - "spec": { - "serviceName": "dataverse-glassfish", - "replicas": 1, - "template": { - "metadata": { - "labels": { - "name": "iqss-dataverse-glassfish" - } - }, - "spec": { - "initContainers": [ - { - "name": "start-glassfish", - "image": "iqss/init-container:latest", - "imagePullPolicy": "IfNotPresent", - "env": [ - { - "name": "CONTAINER_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "MY_POD_NAME", - "value": "start-glassfish" - }, - { - "name": "POSTGRES_ADMIN_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-admin-secret", - "key" : "postgresql-admin-password" - } - } - }, - { - "name": "POSTGRES_SERVER", - "value": "dataverse-postgresql-0" - }, - - { - "name": "POSTGRES_SERVICE_HOST", - "value": "dataverse-postgresql-service" - }, - { - "name": "POSTGRES_USER", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-user" - } - } - }, - { - "name": "POSTGRES_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-password" - } - } - }, - { - "name": "POSTGRES_DATABASE", - "value": "dvndb" - } - ] - } - ], - "containers": [ - { - "name": "dataverse-plus-glassfish", - "image": "iqss/dataverse-glassfish", - "ports": [ - { - "containerPort": 8080, - "protocol": "TCP" - } - ], - "resources": { - "limits": { - "memory": "2048Mi" - } - }, - "env": [ - { - "name": "MY_POD_NAME", - "valueFrom": { - "fieldRef": { - "fieldPath": "metadata.name" - } - } - }, - { - "name": "POSTGRES_SERVER", - "value": "dataverse-postgresql-0" - }, - { - "name": "POSTGRESQL_ADMIN_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-admin-secret", - "key" : "postgresql-admin-password" - } - } - }, - { - "name": "POSTGRES_SERVICE_HOST", - "value": "dataverse-postgresql-service" - }, - { - "name": "SOLR_SERVICE_HOST", - "value": 
"dataverse-solr-service" - }, - { - "name": "ADMIN_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-admin-secret", - "key" : "admin-password" - } - } - }, - { - "name": "SMTP_HOST", - "value": "localhost" - }, - { - "name": "POSTGRES_USER", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-user" - } - } - }, - { - "name": "POSTGRES_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-password" - } - } - }, - { - "name": "POSTGRES_DATABASE", - "value": "dvndb" - } - ], - "imagePullPolicy": "IfNotPresent", - "securityContext": { - "capabilities": {}, - "privileged": false - } - } - ] - } - }, - "strategy": { - "type": "Rolling", - "rollingParams": { - "updatePeriodSeconds": 1, - "intervalSeconds": 1, - "timeoutSeconds": 300 - }, - "resources": { - "limits": { - "memory": "512Mi" - } - } - }, - "triggers": [ - { - "type": "ImageChange", - "imageChangeParams": { - "automatic": true, - "containerNames": [ - "dataverse-plus-glassfish" - ], - "from": { - "kind": "ImageStreamTag", - "name": "dataverse-plus-glassfish:latest" - } - } - }, - { - "type": "ConfigChange" - } - ], - "selector": { - "name": "iqss-dataverse-glassfish", - "matchLabels": { - "name": "iqss-dataverse-glassfish" - } - } - } - }, - { - "kind": "StatefulSet", - "apiVersion": "apps/v1beta1", - "metadata": { - "name": "dataverse-postgresql", - "annotations": { - "template.alpha.openshift.io/wait-for-ready": "true" - } - }, - "spec": { - "serviceName": "dataverse-postgresql-service", - "replicas": 1, - "template": { - "metadata": { - "labels": { - "name": "iqss-dataverse-postgresql" - } - }, - "spec": { - "containers": [ - { - "name": "centos-postgresql-10-centos8", - "image": "centos/postgresql-10-centos8", - "command": [ - "sh", - "-c", - "echo 'Setting up Postgres Master/Slave replication...'; [[ `hostname` =~ -([0-9]+)$ ]] || exit 1; ordinal=${BASH_REMATCH[1]}; if [[ $ordinal -eq 0 ]]; then run-postgresql-master; else run-postgresql-slave; fi;" - ], - "ports": [ - { - "containerPort": 5432, - "protocol": "TCP" - } - ], - "env": [ - { - "name": "POSTGRESQL_USER", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-user" - } - } - }, - { - "name": "POSTGRESQL_MASTER_USER", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-master-secret", - "key" : "postgresql-master-user" - } - } - }, - { - "name": "POSTGRESQL_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-secret", - "key" : "postgresql-password" - } - } - }, - { - "name": "POSTGRESQL_MASTER_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-master-secret", - "key" : "postgresql-master-password" - } - } - }, - { - "name": "POSTGRESQL_MASTER_SERVICE_NAME", - "value": "dataverse-postgresql-service" - }, - { - "name": "POSTGRESQL_MASTER_IP", - "value": "dataverse-postgresql-0.dataverse-postgresql-service" - }, - { - "name": "postgresql_master_addr", - "value": "dataverse-postgresql-0.dataverse-postgresql-service" - }, - { - "name": "master_fqdn", - "value": "dataverse-postgresql-0.dataverse-postgresql-service" - }, - { - "name": "POSTGRESQL_DATABASE", - "value": "dvndb" - }, - { - "name": "POSTGRESQL_ADMIN_PASSWORD", - "valueFrom": { - "secretKeyRef": { - "name" : "dataverse-postgresql-admin-secret", - "key" : "postgresql-admin-password" - } - } - } - ], - "resources": { - "limits": { - "memory": "256Mi" - } - }, - 
"imagePullPolicy": "IfNotPresent", - "securityContext": { - "capabilities": {}, - "privileged": false - } - } - ] - } - }, - "strategy": { - "type": "Rolling", - "rollingParams": { - "updatePeriodSeconds": 1, - "intervalSeconds": 1, - "timeoutSeconds": 300 - }, - "resources": {} - }, - "triggers": [ - { - "type": "ImageChange", - "imageChangeParams": { - "automatic": true, - "containerNames": [ - "centos-postgresql-10-centos8" - ], - "from": { - "kind": "ImageStreamTag", - "name": "centos/postgresql-10-centos8:latest" - } - } - }, - { - "type": "ConfigChange" - } - ], - "selector": { - "name": "iqss-dataverse-postgresql", - "matchLabels": { - "name": "iqss-dataverse-postgresql" - } - } - } - }, - { - "kind": "StatefulSet", - "apiVersion": "apps/v1beta1", - "metadata": { - "name": "dataverse-solr", - "annotations": { - "template.alpha.openshift.io/wait-for-ready": "true" - } - }, - "spec": { - "serviceName" : "dataverse-solr-service", - "template": { - "metadata": { - "labels": { - "name": "iqss-dataverse-solr" - } - }, - "spec": { - "containers": [ - { - "name": "iqss-dataverse-solr", - "image": "iqss/dataverse-solr", - "ports": [ - { - "containerPort": 8983, - "protocol": "TCP" - } - ], - "resources": { - "limits": { - "memory": "1024Mi" - } - }, - "imagePullPolicy": "IfNotPresent", - "securityContext": { - "capabilities": {}, - "privileged": false - } - } - ] - } - }, - "strategy": { - "type": "Rolling", - "rollingParams": { - "updatePeriodSeconds": 1, - "intervalSeconds": 1, - "timeoutSeconds": 300 - }, - "resources": {} - }, - "triggers": [ - { - "type": "ImageChange", - "imageChangeParams": { - "automatic": true, - "containerNames": [ - "iqss-dataverse-solr" - ], - "from": { - "kind": "ImageStreamTag", - "name": "iqss-dataverse-solr:latest" - } - } - }, - { - "type": "ConfigChange" - } - ], - "replicas": 1, - "selector": { - "name": "iqss-dataverse-solr", - "matchLabels" : { - "name" : "iqss-dataverse-solr" - } - } - } - } - ], - "parameters": [ - { - "name": "ADMIN_PASSWORD", - "description": "admin password", - "generate": "expression", - "from": "[a-zA-Z0-9]{8}" - }, - { - "name": "POSTGRESQL_USER", - "description": "postgresql user", - "generate": "expression", - "from": "user[A-Z0-9]{3}" - }, - { - "name": "POSTGRESQL_PASSWORD", - "description": "postgresql password", - "generate": "expression", - "from": "[a-zA-Z0-9]{8}" - }, - { - "name": "POSTGRESQL_MASTER_USER", - "description": "postgresql master user", - "generate": "expression", - "from": "user[A-Z0-9]{3}" - }, - { - "name": "POSTGRESQL_MASTER_PASSWORD", - "description": "postgresql master password", - "generate": "expression", - "from": "[a-zA-Z0-9]{8}" - }, - { - "name": "POSTGRESQL_ADMIN_PASSWORD", - "description": "postgresql admin password", - "generate": "expression", - "from": "[a-zA-Z0-9]{8}" - } - ] -} From 7f90961db7a1c16fb3562c97552b94463ce9c57d Mon Sep 17 00:00:00 2001 From: Philip Durbin Date: Mon, 9 Nov 2020 15:45:13 -0500 Subject: [PATCH 027/179] use centos 8 image #7394 --- conf/docker-aio/prep_it.bash | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/conf/docker-aio/prep_it.bash b/conf/docker-aio/prep_it.bash index e0078b679b2..adb257e43b1 100755 --- a/conf/docker-aio/prep_it.bash +++ b/conf/docker-aio/prep_it.bash @@ -10,7 +10,7 @@ n_wait=5 cd conf/docker-aio ./0prep_deps.sh ./1prep.sh -docker build -t dv0 -f c7.dockerfile . +docker build -t dv0 -f c8.dockerfile . 
 # cleanup from previous runs if necessary
 docker rm -f dv
 # start container

From c7fc7c29fae8f9370769fbd2b9c1b598d21f3800 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 9 Nov 2020 15:47:22 -0500
Subject: [PATCH 028/179] remove old centos 7 docker-aio file #7394

We upgraded to centos 8
---
 conf/docker-aio/c7.dockerfile | 81 -----------------------------------
 1 file changed, 81 deletions(-)
 delete mode 100644 conf/docker-aio/c7.dockerfile

diff --git a/conf/docker-aio/c7.dockerfile b/conf/docker-aio/c7.dockerfile
deleted file mode 100644
index 6b66a0566a8..00000000000
--- a/conf/docker-aio/c7.dockerfile
+++ /dev/null
@@ -1,81 +0,0 @@
-FROM centos:7
-# OS dependencies
-RUN yum install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-7-x86_64/pgdg-redhat-repo-latest.noarch.rpm
-RUN yum install -y java-1.8.0-openjdk-devel postgresql96-server sudo epel-release unzip perl curl httpd
-RUN yum install -y jq lsof awscli
-
-# copy and unpack dependencies (solr, payara)
-COPY dv /tmp/dv
-COPY testdata/schema*.xml /tmp/dv/
-COPY testdata/solrconfig.xml /tmp/dv
-
-# ITs need files
-COPY testdata/sushi_sample_logs.json /tmp/
-
-# IPv6 and localhost appears to be related to some of the intermittant connection issues
-COPY disableipv6.conf /etc/sysctl.d/
-RUN rm /etc/httpd/conf/*
-COPY httpd.conf /etc/httpd/conf
-RUN cd /opt ; tar zxf /tmp/dv/deps/solr-7.7.2dv.tgz
-RUN cd /opt ; unzip /tmp/dv/deps/payara-5.2020.2.zip ; ln -s /opt/payara5 /opt/glassfish4
-
-# this copy of domain.xml is the result of running `asadmin set server.monitoring-service.module-monitoring-levels.jvm=LOW` on a default glassfish installation (aka - enable the glassfish REST monitir endpoint for the jvm`
-COPY domain-restmonitor.xml /opt/payara5/glassfish/domains/domain1/config/domain.xml
-
-#RUN sudo -u postgres /usr/bin/initdb -D /var/lib/pgsql/data
-RUN sudo -u postgres /usr/pgsql-9.6/bin/initdb -D /var/lib/pgsql/data
-
-# copy configuration related files
-RUN cp /tmp/dv/pg_hba.conf /var/lib/pgsql/data/
-RUN cp -r /opt/solr-7.7.2/server/solr/configsets/_default /opt/solr-7.7.2/server/solr/collection1
-RUN cp /tmp/dv/schema*.xml /opt/solr-7.7.2/server/solr/collection1/conf/
-RUN cp /tmp/dv/solrconfig.xml /opt/solr-7.7.2/server/solr/collection1/conf/solrconfig.xml
-
-# skipping payara user and solr user (run both as root)
-
-#solr port
-EXPOSE 8983
-
-# postgres port
-EXPOSE 5432
-
-# payara port
-EXPOSE 8080
-
-# apache port, http
-EXPOSE 80
-
-# debugger ports (jmx,jdb)
-EXPOSE 8686
-EXPOSE 9009
-
-RUN mkdir /opt/dv
-
-# keeping the symlink on the off chance that something else is still assuming /usr/local/glassfish4
-RUN ln -s /opt/payara5 /usr/local/glassfish4
-COPY dv/install/ /opt/dv/
-COPY install.bash /opt/dv/
-COPY entrypoint.bash /opt/dv/
-COPY testdata /opt/dv/testdata
-COPY testdata/updateSchemaMDB.sh /opt/dv/testdata/
-COPY testscripts/* /opt/dv/testdata/
-COPY setupIT.bash /opt/dv
-WORKDIR /opt/dv
-
-# need to take DOI provider info from build args as of ec377d2a4e27424db8815c55ce544deee48fc5e0
-# Default to EZID; use built-args to switch to DataCite (or potentially handles)
-#ARG DoiProvider=EZID
-ARG DoiProvider=FAKE
-ARG doi_baseurl=https://ezid.cdlib.org
-ARG doi_username=apitest
-ARG doi_password=apitest
-ENV DoiProvider=${DoiProvider}
-ENV doi_baseurl=${doi_baseurl}
-ENV doi_username=${doi_username}
-ENV doi_password=${doi_password}
-COPY configure_doi.bash /opt/dv
-
-# healthcheck for payara only (assumes modified domain.xml);
-# does not check dataverse application status.
-HEALTHCHECK CMD curl --fail http://localhost:4848/monitoring/domain/server.json || exit 1
-CMD ["/opt/dv/entrypoint.bash"]

From 5814985cc250097e041cf7dfa9154f926ad9b89e Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Mon, 9 Nov 2020 15:49:40 -0500
Subject: [PATCH 029/179] revert docker-dcm stuff to pre centos 8 #7394

We don't want to test all this stuff at this time. Just put it back to
how it was.
---
 conf/docker-dcm/c8client.dockerfile | 7 -------
 conf/docker-dcm/dcmsrv.dockerfile   | 2 +-
 conf/docker-dcm/docker-compose.yml  | 2 +-
 conf/docker-dcm/rsalsrv.dockerfile  | 2 +-
 4 files changed, 3 insertions(+), 10 deletions(-)
 delete mode 100644 conf/docker-dcm/c8client.dockerfile

diff --git a/conf/docker-dcm/c8client.dockerfile b/conf/docker-dcm/c8client.dockerfile
deleted file mode 100644
index f2edd7f373c..00000000000
--- a/conf/docker-dcm/c8client.dockerfile
+++ /dev/null
@@ -1,7 +0,0 @@
-# build from repo root
-FROM centos:8
-RUN yum install -y epel-release
-RUN yum install -y rsync openssh-clients jq curl wget lynx
-RUN useradd depositor
-USER depositor
-WORKDIR /home/depositor

diff --git a/conf/docker-dcm/dcmsrv.dockerfile b/conf/docker-dcm/dcmsrv.dockerfile
index 01b4e5b742e..9989fa3a89d 100644
--- a/conf/docker-dcm/dcmsrv.dockerfile
+++ b/conf/docker-dcm/dcmsrv.dockerfile
@@ -1,5 +1,5 @@
 # build from repo root
-FROM centos:8
+FROM centos:6
 RUN yum install -y epel-release
 ARG RPMFILE=dcm-0.5-0.noarch.rpm
 COPY ${RPMFILE} /tmp/

diff --git a/conf/docker-dcm/docker-compose.yml b/conf/docker-dcm/docker-compose.yml
index 0f449880d32..49d4467d349 100644
--- a/conf/docker-dcm/docker-compose.yml
+++ b/conf/docker-dcm/docker-compose.yml
@@ -39,7 +39,7 @@ services:
   client:
     build:
       context: .
-      dockerfile: c8client.dockerfile
+      dockerfile: c6client.dockerfile
     command: sleep infinity
     container_name: dcm_client
     volumes:

diff --git a/conf/docker-dcm/rsalsrv.dockerfile b/conf/docker-dcm/rsalsrv.dockerfile
index 836d6837db9..844432afe6b 100644
--- a/conf/docker-dcm/rsalsrv.dockerfile
+++ b/conf/docker-dcm/rsalsrv.dockerfile
@@ -1,4 +1,4 @@
-FROM centos:8
+FROM centos:7
 ARG RPMFILE=rsal-0.1-0.noarch.rpm
 RUN yum update; yum install -y epel-release
 COPY ${RPMFILE} /tmp/

From 655244e049ca5c1b0adc9dcede695b35b8073c5d Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Tue, 10 Nov 2020 16:47:36 -0500
Subject: [PATCH 030/179] only log when add results in 2+ entries in stack

---
 src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 0fb5fd2a953..800856e2f35 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -344,7 +344,7 @@ public CommandContext getContext() {
                 @Override
                 public void addCommand (Command command){
 
-                    if (logger.isLoggable(Level.FINE)) {
+                    if (logger.isLoggable(Level.FINE) && !commandsCalled.isEmpty()) {
                         int instance = (int) (100 * Math.random());
                         try {
                             logger.fine("Current Command Stack (" + instance + "): ");

From 31a02c43621bd783f9f1a6a44c974b559ffff8ea Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 12 Nov 2020 11:49:39 -0500
Subject: [PATCH 031/179] spacing

---
 .../iq/dataverse/EjbDataverseEngine.java | 54 +++++++++----------
 1 file changed, 27 insertions(+), 27 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index 800856e2f35..b95a894fff0 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -338,35 +338,35 @@ protected void completeCommand(Command command, Object r, Stack called)
     public CommandContext getContext() {
         if (ctxt == null) {
             ctxt = new CommandContext() {
-
+
                 public Stack commandsCalled;
-
+
                 @Override
-                public void addCommand (Command command){
-
-                    if (logger.isLoggable(Level.FINE) && !commandsCalled.isEmpty()) {
-                        int instance = (int) (100 * Math.random());
-                        try {
-                            logger.fine("Current Command Stack (" + instance + "): ");
-                            commandsCalled.forEach((c) -> {
-                                logger.fine("Command (" + instance + "): " + c.getClass().getSimpleName()
-                                        + "for DvObjects");
-                                for (Map.Entry e : ((Map) c.getAffectedDvObjects())
-                                        .entrySet()) {
-                                    logger.fine("(" + instance + "): " + e.getKey() + " : " + e.getValue().getId());
-                                }
-                            });
-                            logger.fine("Adding command(" + instance + "): " + command.getClass().getSimpleName()
-                                    + " for DvObjects");
-                            for (Map.Entry e : ((Map) command
-                                    .getAffectedDvObjects()).entrySet()) {
-                                logger.fine(e.getKey() + " : " + e.getValue().getId());
-                            }
-                        } catch (Exception e) {
-                            logger.fine("Exception logging command stack(" + instance + "): " + e.getMessage());
-                        }
-                        ;
-                    }
+                public void addCommand(Command command) {
+
+                    if (logger.isLoggable(Level.FINE) && !commandsCalled.isEmpty()) {
+                        int instance = (int) (100 * Math.random());
+                        try {
+                            logger.fine("Current Command Stack (" + instance + "): ");
+                            commandsCalled.forEach((c) -> {
+                                logger.fine("Command (" + instance + "): " + c.getClass().getSimpleName()
+                                        + "for DvObjects");
+                                for (Map.Entry e : ((Map) c.getAffectedDvObjects())
+                                        .entrySet()) {
+                                    logger.fine("(" + instance + "): " + e.getKey() + " : " + e.getValue().getId());
+                                }
+                            });
+                            logger.fine("Adding command(" + instance + "): " + command.getClass().getSimpleName()
+                                    + " for DvObjects");
+                            for (Map.Entry e : ((Map) command
+                                    .getAffectedDvObjects()).entrySet()) {
+                                logger.fine(e.getKey() + " : " + e.getValue().getId());
+                            }
+                        } catch (Exception e) {
+                            logger.fine("Exception logging command stack(" + instance + "): " + e.getMessage());
+                        }
+                        ;
+                    }
                 commandsCalled.push(command);
             }

From c9ca9858dde9cffe7ae8536e7174b56244425285 Mon Sep 17 00:00:00 2001
From: qqmyers
Date: Thu, 12 Nov 2020 11:51:46 -0500
Subject: [PATCH 032/179] typo

---
 src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
index b95a894fff0..9606b24ff14 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EjbDataverseEngine.java
@@ -365,7 +365,6 @@ public void addCommand(Command command) {
                         } catch (Exception e) {
                             logger.fine("Exception logging command stack(" + instance + "): " + e.getMessage());
                         }
-                        ;
                     }
                 commandsCalled.push(command);
             }

From b41d428cfe5ee1c6a62c56ec5f9820c84f10c11d Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 12 Nov 2020 13:25:54 -0500
Subject: [PATCH 033/179] Revert "get dialog to close on Save Changes for
 groups #6792"

This reverts commit e0fc1f8294a495b4053dfabf1de6a25c402921c8.

We're going to change @all instead.
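
A note on the logging pattern that patches 030-032 refine before moving on. The engine's addCommand() hook logs the current command stack at FINE level, and patch 030 adds a second guard so the fairly expensive string building only runs when the stack already has an entry — that is, when adding this command produces two or more nested commands. The following is a minimal, self-contained sketch of that guard using plain java.util.logging; the class name and the String stack elements are hypothetical stand-ins, not the real Dataverse Command machinery:

    import java.util.ArrayDeque;
    import java.util.Deque;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    public class CommandStackLoggingSketch {

        private static final Logger logger =
                Logger.getLogger(CommandStackLoggingSketch.class.getCanonicalName());

        // Stand-in for the engine's stack of commands currently in flight.
        private final Deque<String> commandsCalled = new ArrayDeque<>();

        public void addCommand(String command) {
            // Check the log level first, then whether there is anything worth
            // reporting: with an empty stack this add creates no nesting, and
            // the string concatenation below is skipped entirely.
            if (logger.isLoggable(Level.FINE) && !commandsCalled.isEmpty()) {
                // A random tag correlates the lines of one addCommand() call
                // in an interleaved log, mirroring the "instance" trick above.
                int instance = (int) (100 * Math.random());
                logger.fine("Current command stack (" + instance + "): " + commandsCalled);
                logger.fine("Adding command (" + instance + "): " + command);
            }
            commandsCalled.push(command);
        }
    }

The double guard matters because FINE logging is usually disabled in production: both conditions are cheap, so the common path pays almost nothing, while the diagnostic detail is still available when nested commands need debugging.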
---
 .../files/var/www/dataverse/branding/analytics-code.html | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
index 95357da85a3..f300a5054e8 100644
--- a/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
+++ b/doc/sphinx-guides/source/_static/installation/files/var/www/dataverse/branding/analytics-code.html
@@ -1,6 +1,7 @@

From 8b141161286151a1114a6ec723ae35e3cd3e3d86 Mon Sep 17 00:00:00 2001
From: Philip Durbin
Date: Thu, 12 Nov 2020 13:26:46 -0500
Subject: [PATCH 034/179] for groups, get Save Changes dialog to close #6792

---
 src/main/webapp/manage-groups.xhtml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/main/webapp/manage-groups.xhtml b/src/main/webapp/manage-groups.xhtml
index 706c8f7ea74..5a0afedf255 100644
--- a/src/main/webapp/manage-groups.xhtml
+++ b/src/main/webapp/manage-groups.xhtml
@@ -92,8 +92,8 @@
             #{bundle['dataverse.manageGroups.tab.action.btn.delete.dialog.tip']}

- - + +
@@ -171,7 +171,7 @@ + update="manageGroupsForm"> - +

#{bundle['file.deleteFileDialog.tip']}

#{bundle['file.deleteFileDialog.failed.tip']}

- -
@@ -947,7 +947,7 @@
         function checkFilesSelected() {
             var count = PF('filesTable').getSelectedRowsCount();
             if (count > 0) {
-                PF('deleteFileConfirmation').show();
+                PF('editDeleteFileConfirmation').show();
             } else {
                 PF('selectFilesForDeleteFragment').show();
             }

From a7e17a12e863c290b35f9706e9a7f1adc5e8f7fc Mon Sep 17 00:00:00 2001
From: Stephen Kraffmiller
Date: Thu, 3 Dec 2020 13:27:32 -0500
Subject: [PATCH 125/179] #7406 fix popup update reference

---
 src/main/webapp/file-request-access-popup-fragment.xhtml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/main/webapp/file-request-access-popup-fragment.xhtml b/src/main/webapp/file-request-access-popup-fragment.xhtml
index d161c5d5de6..400eecfd3ac 100644
--- a/src/main/webapp/file-request-access-popup-fragment.xhtml
+++ b/src/main/webapp/file-request-access-popup-fragment.xhtml
@@ -42,7 +42,7 @@
+ update="@([id$=requestAccessConsolidated])" oncomplete="PF('requestAccessPopup').hide();"> -
@@ -1701,13 +1704,6 @@ function updateTemplate() { $('button[id$="updateTemplate"]').trigger('click'); } - function checkNewlyRestricted() { - if ($('input[id$="showAccessPopup"]').val() === 'true') { - PF('accessPopup').show(); - } else { - $('button[id$="datasetSave"]').trigger('click'); - } - } function updateHiddenReason(textArea) { $('input[id$="hiddenReasonInput"]').val(textArea.value); } diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml index 8256447f42a..118dcf1151b 100644 --- a/src/main/webapp/editFilesFragment.xhtml +++ b/src/main/webapp/editFilesFragment.xhtml @@ -316,7 +316,6 @@ emptyMessage="#{datasetPage || EditDatafilesPage.showFileUploadFragment() ? bundle['file.noUploadedFiles.tip'] : bundle['file.noSelectedFiles.tip']}">
-
@@ -416,17 +415,22 @@
- -

#{bundle['dataset.noSelectedFilesForDelete']}

-
- -
-
- +

#{bundle['file.deleteFileDialog.tip']}

#{bundle['file.deleteFileDialog.failed.tip']}

- -
@@ -619,8 +614,12 @@
- -
@@ -944,14 +943,6 @@
             $('button[id$="updateEditDataFilesButtonsForDelete"]').trigger('click');
             $('button[id$="allDeletesFinished"]').trigger('click');
         }
-        function checkFilesSelected() {
-            var count = PF('filesTable').getSelectedRowsCount();
-            if (count > 0) {
-                PF('deleteFileConfirmation').show();
-            } else {
-                PF('selectFilesForDeleteFragment').show();
-            }
-        }
         function openDropboxChooser() {
             options = {
                 // Required. Called when a user selects an item in the Chooser.
@@ -969,6 +960,27 @@
             };
             Dropbox.choose(options);
         }
+
+        function testFilesSelected(popup) {
+            var count = PF('filesTable').getSelectedRowsCount();
+            if (count == 0) {
+                PF(popup).show();
+            } else {
+                return true;
+            }
+        }
+
+        function testFilesSelectedForRestriction() {
+            return testFilesSelected("selectFilesForRestrict");
+        }
+
+        function testFilesSelectedForUnRestriction() {
+            return testFilesSelected("selectFilesForUnRestrict");
+        }
+
+        function testFilesSelectedForDelete() {
+            return testFilesSelected("selectFilesForDelete");
+        }
        //]]>

diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml
index 81634bcb6d2..1135980e984 100644
--- a/src/main/webapp/editdatafiles.xhtml
+++ b/src/main/webapp/editdatafiles.xhtml
@@ -71,9 +71,11 @@
- +
@@ -81,34 +83,37 @@
-
- +

#{bundle['dataset.noSelectedFilesForRestrict']}

-
- +

#{bundle['dataset.noSelectedFilesForUnRestrict']}

-
+ +

#{bundle['dataset.noSelectedFilesForDelete']}

+
+ +
+
@@ -118,14 +123,6 @@
         function clickSave(){
             $('button[id$="datasetSave"]').trigger('click');
         }
-        function checkNewlyRestricted() {
-            if ($('input[id$="showAccessPopup"]').val() === 'true') {
-                PF('editFileAccessPopup').show();
-            }
-            else {
-                $('button[id$="datasetSave"]').trigger('click');
-            }
-        }
        //]]>

diff --git a/src/main/webapp/filesFragment.xhtml b/src/main/webapp/filesFragment.xhtml
index 36e9203f986..3216d939a6f 100644
--- a/src/main/webapp/filesFragment.xhtml
+++ b/src/main/webapp/filesFragment.xhtml
@@ -109,7 +109,6 @@
- From 09b3c05efa77eec6d72612f77d2349e13f5ffb92 Mon Sep 17 00:00:00 2001 From: Michael Heppler Date: Fri, 4 Dec 2020 13:41:18 -0500 Subject: [PATCH 135/179] Added custom css to request access dropdown link to apply bootstrap ui theme styles [ref #7406] --- .../file-download-button-fragment.xhtml | 41 +++++++++++++------ 1 file changed, 29 insertions(+), 12 deletions(-) diff --git a/src/main/webapp/file-download-button-fragment.xhtml b/src/main/webapp/file-download-button-fragment.xhtml index 869bc9743ad..da88d1ae64d 100644 --- a/src/main/webapp/file-download-button-fragment.xhtml +++ b/src/main/webapp/file-download-button-fragment.xhtml @@ -175,31 +175,48 @@ - - + and fileMetadata.dataFile.owner.fileAccessRequest}"> -
  • - - + div[id$="requestPanel"].iq-dropdown-list-item {display:list-item !important;} + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink{display:block;padding:3px 20px;clear:both;font-weight:400;line-height:1.42857143;color:#333;white-space:nowrap} + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink:focus, + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink:hover{color:#262626;text-decoration:none;background-color:#f5f5f5} + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink.active, + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink.active:focus, + div[id$="requestPanel"].iq-dropdown-list-item>a.ui-commandlink.active:hover{color:#fff;text-decoration:none;background-color:#337ab7;outline:0} + + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled{display:block;padding:3px 20px;clear:both;font-weight:400;line-height:1.42857143;white-space:nowrap} + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled, + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled:focus, + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled:hover{background-color:transparent;color:#777;} + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled:focus, + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled:hover{text-decoration:none;background-color:transparent;background-image:none;filter:progid:DXImageTransform.Microsoft.gradient(enabled=false)} + div[id$="requestPanel"].iq-dropdown-list-item.disabled{cursor:not-allowed;} + div[id$="requestPanel"].iq-dropdown-list-item.disabled>span.ui-commandlink.ui-state-disabled{pointer-events:none;} + + + + - - - #{fileMetadata.dataFile.fileAccessRequesters.contains(dataverseSession.user) ? bundle['file.accessRequested'] : bundle['file.requestAccess']} - + +
  • + #{bundle['file.requestAccess']}
  • -
    From fdaa7603eb5b9cdb6b5ebe7f3c2565b4e11e90ec Mon Sep 17 00:00:00 2001 From: Stephen Kraffmiller Date: Fri, 4 Dec 2020 14:20:32 -0500 Subject: [PATCH 136/179] #7406 add request panel to popup update --- src/main/webapp/file-request-access-popup-fragment.xhtml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/webapp/file-request-access-popup-fragment.xhtml b/src/main/webapp/file-request-access-popup-fragment.xhtml index 400eecfd3ac..f0715f6726a 100644 --- a/src/main/webapp/file-request-access-popup-fragment.xhtml +++ b/src/main/webapp/file-request-access-popup-fragment.xhtml @@ -42,7 +42,7 @@
    + update="@([id$=requestAccessConsolidated]), @([id$=requestPanel])" oncomplete="PF('requestAccessPopup').hide();">
    - +

    #{bundle['file.deleteFileDialog.tip']}

    #{bundle['file.deleteFileDialog.failed.tip']}

    @@ -619,9 +619,11 @@ action="#{EditDatafilesPage.restrictFiles(true)}" update="filesTable,:messagePanel" oncomplete="javascript:bind_bsui_components();" /> - +
    @@ -924,7 +926,17 @@ - +
    + + +

    #{bundle['dataset.noSelectedFiles']}

    +
    + +
    +
    diff --git a/src/main/webapp/editdatafiles.xhtml b/src/main/webapp/editdatafiles.xhtml index 1135980e984..3c371c0150c 100644 --- a/src/main/webapp/editdatafiles.xhtml +++ b/src/main/webapp/editdatafiles.xhtml @@ -86,45 +86,10 @@ - - -

    #{bundle['dataset.noSelectedFilesForRestrict']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForUnRestrict']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForDelete']}

    -
    - -
    -
    + - - diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index 320283cfe69..a6553e017d9 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -35,7 +35,7 @@
  • - @@ -47,7 +47,7 @@
  • @@ -58,7 +58,7 @@
  • @@ -90,7 +90,7 @@
  • @@ -113,7 +113,7 @@
  • diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index c14fbd00cd4..b6b0ebd3876 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -44,11 +44,15 @@
    - - + +
    @@ -72,51 +76,15 @@ - - -

    #{bundle['dataset.noSelectedFilesForRestrict']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForUnRestrict']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForDelete']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForMetadataEdit']}

    -
    - -
    -
    - -

    #{bundle['dataset.noSelectedFilesForMetadataEdit']}

    + + +

    #{bundle['dataset.noSelectedFiles']}

    -
    -
    +
\ No newline at end of file

From 71d17342eb9642eab6d973d0461b001df11d959f Mon Sep 17 00:00:00 2001
From: gdurand
Date: Sun, 6 Dec 2020 14:11:22 -0500
Subject: [PATCH 139/179] tweak how "cancel" is handled for access popup

---
 .../edu/harvard/iq/dataverse/DatasetPage.java | 28 ++++++++++++++-----
 .../iq/dataverse/EditDatafilesPage.java       | 24 ++++++++++------
 .../edu/harvard/iq/dataverse/FilePage.java    | 27 +++++++++++-------
 src/main/webapp/editFilesFragment.xhtml       | 21 +++++++-------
 .../webapp/file-edit-button-fragment.xhtml    | 11 +++++---
 .../webapp/file-edit-popup-fragment.xhtml     | 12 ++++----
 src/main/webapp/file.xhtml                    |  1 +
 src/main/webapp/filesFragment.xhtml           |  2 ++
 8 files changed, 80 insertions(+), 46 deletions(-)

diff --git a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
index 047b8322357..6356c6577a6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/DatasetPage.java
@@ -1936,8 +1936,6 @@ private String init(boolean initFull) {
         // init the citation
         displayCitation = dataset.getCitation(true, workingVersion);
 
-        clone = workingVersion.cloneDatasetVersion();
-
         if(workingVersion.isPublished()) {
             MakeDataCountEntry entry = new MakeDataCountEntry(FacesContext.getCurrentInstance(), dvRequestService, workingVersion);
             mdcLogService.logEntry(entry);
@@ -2468,7 +2466,7 @@ public void edit(EditMode editMode) {
             dataset = datasetService.find(dataset.getId());
         }
         workingVersion = dataset.getEditVersion();
-        //clone = workingVersion.cloneDatasetVersion();
+        clone = workingVersion.cloneDatasetVersion();
         if (editMode == EditMode.INFO) {
             // ?
         } else if (editMode == EditMode.FILE) {
@@ -3263,6 +3261,9 @@ private void restrictFiles(List filesToRestrict, boolean restricte
         if (workingVersion.isReleased()) {
             refreshSelectedFiles(filesToRestrict);
         }
+
+        workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
+        workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest);
 
         Command cmd;
         for (FileMetadata fmd : filesToRestrict) {
@@ -5575,9 +5576,22 @@ public void setFileMetadataForAction(FileMetadata fileMetadataForAction) {
         this.fileMetadataForAction = fileMetadataForAction;
     }
 
-    public void resetTerms() {
-        workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(clone.getTermsOfUseAndAccess().getTermsOfAccess());
-        workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(clone.getTermsOfUseAndAccess().isFileAccessRequest());
-    }
+    private String termsOfAccess;
+    private boolean fileAccessRequest;
 
+    public String getTermsOfAccess() {
+        return termsOfAccess;
+    }
+
+    public void setTermsOfAccess(String termsOfAccess) {
+        this.termsOfAccess = termsOfAccess;
+    }
+
+    public boolean isFileAccessRequest() {
+        return fileAccessRequest;
+    }
+
+    public void setFileAccessRequest(boolean fileAccessRequest) {
+        this.fileAccessRequest = fileAccessRequest;
+    }
 }

diff --git a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
index 2bab970fae7..98bb00c52f6 100644
--- a/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/EditDatafilesPage.java
@@ -486,8 +486,6 @@ public String init() {
 
         workingVersion = dataset.getEditVersion();
         clone = workingVersion.cloneDatasetVersion();
-        termsOfAccess = workingVersion.getTermsOfUseAndAccess().getTermsOfAccess();
-        fileAccessRequest = workingVersion.getTermsOfUseAndAccess().isFileAccessRequest();
 
         //todo: can we remove this??
         if (workingVersion == null || !workingVersion.isDraft()) {
@@ -655,8 +653,8 @@ public void setVersionString(String versionString) {
 
     public void restrictFiles(boolean restricted) throws UnsupportedOperationException{
 
-        termsOfAccess = workingVersion.getTermsOfUseAndAccess().getTermsOfAccess();
-        fileAccessRequest = workingVersion.getTermsOfUseAndAccess().isFileAccessRequest();
+        workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
+        workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest);
 
         String fileNames = null;
 
@@ -2998,10 +2996,20 @@ private void populateFileMetadatas() {
 
     private String termsOfAccess;
     private boolean fileAccessRequest;
 
-    public void resetTerms() {
-        workingVersion.getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
-        workingVersion.getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest);
+    public String getTermsOfAccess() {
+        return termsOfAccess;
+    }
+
+    public void setTermsOfAccess(String termsOfAccess) {
+        this.termsOfAccess = termsOfAccess;
+    }
+
+    public boolean isFileAccessRequest() {
+        return fileAccessRequest;
+    }
+
+    public void setFileAccessRequest(boolean fileAccessRequest) {
+        this.fileAccessRequest = fileAccessRequest;
     }
 }

diff --git a/src/main/java/edu/harvard/iq/dataverse/FilePage.java b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
index 720637df166..664ce72359c 100644
--- a/src/main/java/edu/harvard/iq/dataverse/FilePage.java
+++ b/src/main/java/edu/harvard/iq/dataverse/FilePage.java
@@ -376,9 +376,9 @@ public String saveProvFreeform(String freeformTextInput, DataFile dataFileFromPo
     public String restrictFile(boolean restricted) throws CommandException{
         String fileNames = null;
-        String termsOfAccess = this.fileMetadata.getDatasetVersion().getTermsOfUseAndAccess().getTermsOfAccess();
-        Boolean allowRequest = this.fileMetadata.getDatasetVersion().getTermsOfUseAndAccess().isFileAccessRequest();
         editDataset = this.file.getOwner();
+        editDataset.getEditVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
+        editDataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest);
 
         Command cmd;
         for (FileMetadata fmw : editDataset.getEditVersion().getFileMetadatas()) {
@@ -390,9 +390,6 @@ public String restrictFile(boolean restricted) throws CommandException{
             }
         }
 
-        editDataset.getEditVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
-        editDataset.getEditVersion().getTermsOfUseAndAccess().setFileAccessRequest(allowRequest);
-
         if (fileNames != null) {
             String successMessage = BundleUtil.getStringFromBundle("file.restricted.success");
             successMessage = successMessage.replace("{0}", fileNames);
@@ -1029,10 +1026,20 @@ public void showPreview(GuestbookResponse guestbookResponse) {
 
     private String termsOfAccess;
    private boolean fileAccessRequest;
 
-    public void resetTerms() {
-        fileMetadata.getDatasetVersion().getTermsOfUseAndAccess().setTermsOfAccess(termsOfAccess);
-        fileMetadata.getDatasetVersion().getTermsOfUseAndAccess().setFileAccessRequest(fileAccessRequest);
-    }
+    public String getTermsOfAccess() {
+        return termsOfAccess;
+    }
+
+    public void setTermsOfAccess(String termsOfAccess) {
+        this.termsOfAccess = termsOfAccess;
+    }
+
+    public boolean isFileAccessRequest() {
+        return fileAccessRequest;
+    }
+
+    public void setFileAccessRequest(boolean fileAccessRequest) {
+        this.fileAccessRequest = fileAccessRequest;
+    }
 }

diff --git a/src/main/webapp/editFilesFragment.xhtml b/src/main/webapp/editFilesFragment.xhtml
index 483af4b0c5e..b244c1a3679 100644
--- a/src/main/webapp/editFilesFragment.xhtml
+++ b/src/main/webapp/editFilesFragment.xhtml
@@ -421,9 +421,12 @@
  • - + +
  • @@ -597,8 +600,8 @@ data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title']}">
    - +
    @@ -608,8 +611,8 @@ data-toggle="tooltip" data-placement="auto top" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.requestAccess.title']}">
    - +
    @@ -619,11 +622,9 @@ action="#{EditDatafilesPage.restrictFiles(true)}" update="filesTable,:messagePanel" oncomplete="javascript:bind_bsui_components();" /> - + diff --git a/src/main/webapp/file-edit-button-fragment.xhtml b/src/main/webapp/file-edit-button-fragment.xhtml index a6553e017d9..bf8d354c841 100644 --- a/src/main/webapp/file-edit-button-fragment.xhtml +++ b/src/main/webapp/file-edit-button-fragment.xhtml @@ -18,7 +18,8 @@ This can be used for a single, specific file (passing "fileMetadata") or a group of selected files. - Parameters: + Parameters: + dataserVersion - the datasetversion object to associate with this view fileMetadata - for single file, the fileMetadata object of that file fileMetadataForAction - used by DatasetPage popups to identify single file (also clears for selected file) isDraftReplacementFile - for single file, if the file is a draft and already replacing a past file @@ -46,12 +47,14 @@
  • - - + + +
  • diff --git a/src/main/webapp/file-edit-popup-fragment.xhtml b/src/main/webapp/file-edit-popup-fragment.xhtml index b6b0ebd3876..e2c52f7bf86 100644 --- a/src/main/webapp/file-edit-popup-fragment.xhtml +++ b/src/main/webapp/file-edit-popup-fragment.xhtml @@ -28,7 +28,7 @@ data-toggle="tooltip" data-placement="auto right" data-original-title="#{bundle['file.dataFilesTab.terms.list.termsOfAccess.termsOfsAccess.title']}">
    - +
    @@ -39,7 +39,7 @@
    + value="#{bean.fileAccessRequest}" widgetVar="inputfar"/>
    @@ -48,11 +48,9 @@ onclick="PF('accessPopup').hide();" action="#{bean[restrictFileAction](true)}" update=":messagePanel" /> - + diff --git a/src/main/webapp/file.xhtml b/src/main/webapp/file.xhtml index 4e3b1dbd8e0..9c19273037c 100644 --- a/src/main/webapp/file.xhtml +++ b/src/main/webapp/file.xhtml @@ -225,6 +225,7 @@
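
The section ends inside patch 139, but the Java side of that patch is worth spelling out, because the same shape recurs across DatasetPage, EditDatafilesPage, and FilePage: instead of undoing edits on cancel (the old resetTerms() approach, which copied values back from a cloned dataset version), the access popup now binds to plain scratch properties on the backing bean, and those values are copied onto the working version only when the user confirms. Cancel then needs no undo logic at all — the scratch state is simply discarded. A minimal sketch of that idea follows; TermsHolder and the class name are hypothetical stand-ins, not Dataverse types:

    // Sketch only: popup inputs bind to scratch fields; cancel discards them.
    public class AccessPopupBeanSketch {

        /** Hypothetical stand-in for the terms object on the working version. */
        public static class TermsHolder {
            private String termsOfAccess;
            private boolean fileAccessRequest;

            public void setTermsOfAccess(String termsOfAccess) { this.termsOfAccess = termsOfAccess; }
            public void setFileAccessRequest(boolean fileAccessRequest) { this.fileAccessRequest = fileAccessRequest; }
        }

        // Scratch state the popup's text area and checkbox are bound to.
        private String termsOfAccess;
        private boolean fileAccessRequest;

        public String getTermsOfAccess() { return termsOfAccess; }
        public void setTermsOfAccess(String termsOfAccess) { this.termsOfAccess = termsOfAccess; }
        public boolean isFileAccessRequest() { return fileAccessRequest; }
        public void setFileAccessRequest(boolean fileAccessRequest) { this.fileAccessRequest = fileAccessRequest; }

        /** Invoked only by the popup's confirm button; cancel never calls this. */
        public void applyTerms(TermsHolder workingTerms) {
            workingTerms.setTermsOfAccess(termsOfAccess);
            workingTerms.setFileAccessRequest(fileAccessRequest);
            // ...then proceed with the restrict/unrestrict command as usual.
        }
    }

The design choice here is "copy on confirm" rather than "mutate then roll back": it removes the need for the cloned-version bookkeeping that the earlier code used, which is exactly what the deleted resetTerms() methods were for.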